llama_cpp 0.3.6 → 0.3.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/ext/llama_cpp/src/ggml-alloc.c +8 -0
- data/ext/llama_cpp/src/ggml-cuda.cu +1165 -721
- data/ext/llama_cpp/src/ggml-metal.m +39 -18
- data/ext/llama_cpp/src/ggml.c +396 -150
- data/ext/llama_cpp/src/ggml.h +113 -32
- data/ext/llama_cpp/src/llama-util.h +41 -1
- data/ext/llama_cpp/src/llama.cpp +214 -146
- data/ext/llama_cpp/src/llama.h +18 -1
- data/lib/llama_cpp/version.rb +2 -2
- metadata +2 -2
data/ext/llama_cpp/src/llama.h
CHANGED
@@ -86,7 +86,20 @@ extern "C" {
|
|
86
86
|
|
87
87
|
typedef void (*llama_progress_callback)(float progress, void *ctx);
|
88
88
|
|
89
|
-
|
89
|
+
enum llama_log_level {
|
90
|
+
LLAMA_LOG_LEVEL_ERROR = 2,
|
91
|
+
LLAMA_LOG_LEVEL_WARN = 3,
|
92
|
+
LLAMA_LOG_LEVEL_INFO = 4
|
93
|
+
};
|
94
|
+
|
95
|
+
// Signature for logging events
|
96
|
+
// Note that text includes the new line character at the end for most events.
|
97
|
+
// If your logging mechanism cannot handle that, check if the last character is '\n' and strip it
|
98
|
+
// if it exists.
|
99
|
+
// It might not exist for progress report where '.' is output repeatedly.
|
100
|
+
typedef void (*llama_log_callback)(llama_log_level level, const char * text, void * user_data);
|
101
|
+
|
102
|
+
struct llama_context_params {
|
90
103
|
uint32_t seed; // RNG seed, -1 for random
|
91
104
|
int32_t n_ctx; // text context
|
92
105
|
int32_t n_batch; // prompt processing batch size
|
@@ -195,6 +208,10 @@ extern "C" {
|
|
195
208
|
int32_t n_eval;
|
196
209
|
};
|
197
210
|
|
211
|
+
// Set callback for all future logging events.
|
212
|
+
// If this is not called, or NULL is supplied, everything is output on stderr.
|
213
|
+
LLAMA_API void llama_log_set(llama_log_callback log_callback, void * user_data);
|
214
|
+
|
198
215
|
LLAMA_API int llama_max_devices();
|
199
216
|
|
200
217
|
LLAMA_API struct llama_context_params llama_context_default_params();
|
data/lib/llama_cpp/version.rb
CHANGED
@@ -3,8 +3,8 @@
|
|
3
3
|
# llama_cpp.rb provides Ruby bindings for the llama.cpp.
|
4
4
|
module LLaMACpp
|
5
5
|
# The version of llama_cpp.rb you install.
|
6
|
-
VERSION = '0.3.6'
|
6
|
+
VERSION = '0.3.7'
|
7
7
|
|
8
8
|
# The version of llama.cpp bundled with llama_cpp.rb.
|
9
|
-
LLAMA_CPP_VERSION = 'master-
|
9
|
+
LLAMA_CPP_VERSION = 'master-9ca4abe'
|
10
10
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: llama_cpp
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.3.6
|
4
|
+
version: 0.3.7
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- yoshoku
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2023-08-
|
11
|
+
date: 2023-08-11 00:00:00.000000000 Z
|
12
12
|
dependencies: []
|
13
13
|
description: llama_cpp.rb provides Ruby bindings for the llama.cpp.
|
14
14
|
email:
|