cui-llama.rn 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -1
- package/android/src/main/CMakeLists.txt +22 -19
- package/android/src/main/java/com/rnllama/LlamaContext.java +62 -20
- package/cpp/common.cpp +4 -11
- package/cpp/common.h +1 -1
- package/cpp/ggml-aarch64.c +2193 -2193
- package/cpp/ggml-aarch64.h +39 -39
- package/cpp/ggml-alloc.c +1042 -1041
- package/cpp/ggml-backend-impl.h +153 -153
- package/cpp/ggml-backend.c +2234 -2225
- package/cpp/ggml-backend.h +238 -236
- package/cpp/ggml-common.h +1829 -1829
- package/cpp/ggml-impl.h +655 -655
- package/cpp/ggml-metal.h +65 -65
- package/cpp/ggml-metal.m +3269 -3273
- package/cpp/ggml-quants.c +14860 -15022
- package/cpp/ggml-quants.h +132 -132
- package/cpp/ggml.c +16 -6
- package/cpp/ggml.h +2447 -2444
- package/cpp/llama.cpp +634 -531
- package/cpp/llama.h +30 -14
- package/cpp/log.h +737 -737
- package/cpp/rn-llama.hpp +9 -1
- package/cpp/sampling.cpp +460 -460
- package/cpp/sgemm.cpp +1027 -1027
- package/cpp/sgemm.h +14 -14
- package/package.json +1 -1
package/cpp/rn-llama.hpp
CHANGED
@@ -253,9 +253,17 @@ struct llama_rn_context
     void loadPrompt()
     {
-        std::vector<llama_token> prompt_tokens = ::llama_tokenize(ctx, params.prompt, true);
+        std::vector<llama_token> prompt_tokens = ::llama_tokenize(ctx, params.prompt, true, true);
         num_prompt_tokens = prompt_tokens.size();

+        // LOG tokens
+        std::stringstream ss;
+        ss << "\n" << __func__ << ": prompt_tokens = ";
+        for (auto& token : prompt_tokens) {
+            ss << token << " ";
+        }
+        LOG_INFO("%s\n", ss.str().c_str());
+
         if (params.n_keep < 0)
         {
             params.n_keep = (int)num_prompt_tokens;