cui-llama.rn 1.3.4 → 1.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/android/src/main/CMakeLists.txt +14 -8
  2. package/android/src/main/jni.cpp +38 -37
  3. package/cpp/common.cpp +50 -30
  4. package/cpp/common.h +32 -13
  5. package/cpp/ggml-alloc.c +0 -1
  6. package/cpp/ggml-backend-reg.cpp +79 -49
  7. package/cpp/ggml-backend.cpp +5 -2
  8. package/cpp/ggml-cpp.h +1 -0
  9. package/cpp/ggml-cpu-aarch64.cpp +57 -72
  10. package/cpp/ggml-cpu-quants.c +5 -1
  11. package/cpp/ggml-cpu.c +6 -6
  12. package/cpp/ggml-cpu.cpp +9 -0
  13. package/cpp/ggml-impl.h +11 -0
  14. package/cpp/ggml-metal.m +2 -2
  15. package/cpp/ggml.c +129 -1388
  16. package/cpp/ggml.h +29 -152
  17. package/cpp/gguf.cpp +1325 -0
  18. package/cpp/gguf.h +202 -0
  19. package/cpp/llama-adapter.cpp +346 -0
  20. package/cpp/llama-adapter.h +73 -0
  21. package/cpp/llama-arch.cpp +1434 -0
  22. package/cpp/llama-arch.h +395 -0
  23. package/cpp/llama-batch.cpp +368 -0
  24. package/cpp/llama-batch.h +88 -0
  25. package/cpp/llama-chat.cpp +567 -0
  26. package/cpp/llama-chat.h +51 -0
  27. package/cpp/llama-context.cpp +1771 -0
  28. package/cpp/llama-context.h +128 -0
  29. package/cpp/llama-cparams.cpp +1 -0
  30. package/cpp/llama-cparams.h +37 -0
  31. package/cpp/llama-cpp.h +30 -0
  32. package/cpp/llama-grammar.cpp +16 -15
  33. package/cpp/llama-grammar.h +5 -6
  34. package/cpp/llama-hparams.cpp +71 -0
  35. package/cpp/llama-hparams.h +140 -0
  36. package/cpp/llama-impl.cpp +167 -0
  37. package/cpp/llama-impl.h +16 -136
  38. package/cpp/llama-kv-cache.cpp +718 -0
  39. package/cpp/llama-kv-cache.h +218 -0
  40. package/cpp/llama-mmap.cpp +589 -0
  41. package/cpp/llama-mmap.h +67 -0
  42. package/cpp/llama-model-loader.cpp +1011 -0
  43. package/cpp/llama-model-loader.h +158 -0
  44. package/cpp/llama-model.cpp +2202 -0
  45. package/cpp/llama-model.h +391 -0
  46. package/cpp/llama-sampling.cpp +117 -4
  47. package/cpp/llama-vocab.cpp +26 -29
  48. package/cpp/llama-vocab.h +14 -2
  49. package/cpp/llama.cpp +8839 -19131
  50. package/cpp/llama.cpp.rej +23 -0
  51. package/cpp/llama.h +31 -9
  52. package/cpp/rn-llama.hpp +39 -37
  53. package/cpp/sgemm.cpp +1091 -378
  54. package/cpp/sgemm.h +2 -2
  55. package/cpp/unicode.cpp +6 -0
  56. package/package.json +1 -1
package/cpp/sgemm.h CHANGED
@@ -5,8 +5,8 @@
 extern "C" {
 #endif

-bool llamafile_sgemm(int64_t, int64_t, int64_t, const void *, int64_t,
-                     const void *, int64_t, void *, int64_t, int, int,
+bool llamafile_sgemm(const struct lm_lm_ggml_compute_params * params, int64_t, int64_t, int64_t,
+                     const void *, int64_t, const void *, int64_t, void *, int64_t,
                      int, int, int);

 #ifdef __cplusplus
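
The hunk above tracks the upstream llama.cpp change that moves the thread index/count into the compute-params struct instead of passing them as two bare ints. Below is a minimal, hedged caller-side sketch of the before/after shape; the parameter names (m, n, k, A, lda, B, ldb, C, ldc, the type tags) and the fields of the params struct are assumptions borrowed from the upstream interface, since the header leaves its arguments unnamed.

#include <cstdint>

// Stand-in for the fork's compute-params struct; the real ggml struct also
// carries per-thread scratch buffers, only the two fields used here are shown.
struct lm_lm_ggml_compute_params {
    int ith; // index of the calling thread
    int nth; // total number of threads
};

// New-style declaration from the hunk above, with assumed parameter names.
bool llamafile_sgemm(const struct lm_lm_ggml_compute_params * params,
                     int64_t m, int64_t n, int64_t k,
                     const void * A, int64_t lda,
                     const void * B, int64_t ldb,
                     void * C, int64_t ldc,
                     int Atype, int Btype, int Ctype);

// Call-site shape, old vs. new: the thread bookkeeping no longer travels as two
// trailing ints; the whole params struct is forwarded so the kernel can read
// ith/nth (and any per-thread scratch) itself.
bool run_sgemm(const lm_lm_ggml_compute_params * params,
               int64_t m, int64_t n, int64_t k,
               const void * A, int64_t lda, const void * B, int64_t ldb,
               void * C, int64_t ldc, int type) {
    // 1.3.4: return llamafile_sgemm(m, n, k, A, lda, B, ldb, C, ldc,
    //                               params->ith, params->nth, type, type, type);
    return llamafile_sgemm(params, m, n, k, A, lda, B, ldb, C, ldc, type, type, type);
}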
package/cpp/unicode.cpp CHANGED
@@ -667,18 +667,24 @@ std::vector<std::string> unicode_regex_split(const std::string & text, const std
         { "\\p{N}", unicode_cpt_flags::NUMBER },
         { "\\p{L}", unicode_cpt_flags::LETTER },
         { "\\p{P}", unicode_cpt_flags::PUNCTUATION },
+        { "\\p{M}", unicode_cpt_flags::ACCENT_MARK },
+        { "\\p{S}", unicode_cpt_flags::SYMBOL },
     };

     static const std::map<int, int> k_ucat_cpt = {
         { unicode_cpt_flags::NUMBER, 0xD1 },
         { unicode_cpt_flags::LETTER, 0xD2 },
         { unicode_cpt_flags::PUNCTUATION, 0xD3 },
+        { unicode_cpt_flags::ACCENT_MARK, 0xD4 },
+        { unicode_cpt_flags::SYMBOL, 0xD5 },
     };

     static const std::map<int, std::string> k_ucat_map = {
         { unicode_cpt_flags::NUMBER, "\x30-\x39" }, // 0-9
         { unicode_cpt_flags::LETTER, "\x41-\x5A\x61-\x7A" }, // A-Za-z
         { unicode_cpt_flags::PUNCTUATION, "\x21-\x23\x25-\x2A\x2C-\x2F\x3A-\x3B\x3F-\x40\\\x5B-\\\x5D\x5F\\\x7B\\\x7D" }, // !-#%-*,-/:-;?-@\[-\]_\{\}
+        { unicode_cpt_flags::ACCENT_MARK, "" }, // no sub-128 codepoints
+        { unicode_cpt_flags::SYMBOL, "\\\x24\\\x2B\x3C-\x3E\x5E\x60\\\x7C" }, // $+<=>^`|
     };

     // compute collapsed codepoints only if needed by at least one regex
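
The new entries extend the \p{...} shorthand handling to combining marks (\p{M}) and symbols (\p{S}). As a rough sketch of how the three tables cooperate (upstream, codepoints carrying a category flag are collapsed to a single placeholder codepoint, and the \p{...} token in the regex is rewritten into a character class holding that placeholder plus the ASCII fallback ranges), the self-contained snippet below reproduces the lookups; the enum values are stand-ins rather than the real unicode_cpt_flags constants.

#include <cstdio>
#include <map>
#include <string>

// Stand-in flags; the real code uses unicode_cpt_flags bit values.
enum ucat { NUMBER, LETTER, PUNCTUATION, ACCENT_MARK, SYMBOL };

int main() {
    // \p{...} shorthand -> category flag (k_ucat_enum)
    static const std::map<std::string, int> k_ucat_enum = {
        { "\\p{N}", NUMBER }, { "\\p{L}", LETTER }, { "\\p{P}", PUNCTUATION },
        { "\\p{M}", ACCENT_MARK }, { "\\p{S}", SYMBOL }, // added in 1.3.6
    };
    // category flag -> collapsed placeholder codepoint (k_ucat_cpt)
    static const std::map<int, int> k_ucat_cpt = {
        { NUMBER, 0xD1 }, { LETTER, 0xD2 }, { PUNCTUATION, 0xD3 },
        { ACCENT_MARK, 0xD4 }, { SYMBOL, 0xD5 }, // added in 1.3.6
    };
    // category flag -> sub-128 fallback ranges kept literal in the rewritten class (k_ucat_map)
    static const std::map<int, std::string> k_ucat_map = {
        { SYMBOL, "\\\x24\\\x2B\x3C-\x3E\x5E\x60\\\x7C" }, // $+<=>^`|
    };

    const int flag = k_ucat_enum.at("\\p{S}");
    std::printf("\\p{S} -> flag %d -> collapsed cpt 0x%X -> ascii fallback \"%s\"\n",
                flag, k_ucat_cpt.at(flag), k_ucat_map.at(flag).c_str());
    return 0;
}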
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "cui-llama.rn",
-  "version": "1.3.4",
+  "version": "1.3.6",
   "description": "Fork of llama.rn for ChatterUI",
   "main": "lib/commonjs/index",
   "module": "lib/module/index",