cui-llama.rn 0.2.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -53,6 +53,8 @@ public class LlamaContext {
   params.hasKey("use_mlock") ? params.getBoolean("use_mlock") : true,
   // boolean use_mmap,
   params.hasKey("use_mmap") ? params.getBoolean("use_mmap") : true,
+  //boolean vocab_only,
+  params.hasKey("vocab_only") ? params.getBoolean("vocab_only") : false,
   // String lora,
   params.hasKey("lora") ? params.getString("lora") : "",
   // float lora_scaled,
@@ -297,6 +299,7 @@ public class LlamaContext {
   int n_gpu_layers, // TODO: Support this
   boolean use_mlock,
   boolean use_mmap,
+  boolean vocab_only,
   String lora,
   float lora_scaled,
   String lora_base,
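
Taken together, the two LlamaContext hunks above thread a new vocab_only option from the JavaScript params map into the native initContext signature, defaulting to false when the key is absent; the JNI hunk further down passes it straight to gpt_params.vocab_only and skips the warmup run when it is set. A minimal sketch of the optional-boolean pattern used here, with a hypothetical helper name that is not part of the package:

import com.facebook.react.bridge.ReadableMap;

// Hypothetical helper, for illustration only: the same optional-boolean
// pattern used for vocab_only above, in isolation.
final class OptionalParams {
  private OptionalParams() {}

  // Returns the boolean stored under `key`, or `fallback` if the key is absent.
  static boolean getBoolean(ReadableMap params, String key, boolean fallback) {
    return params.hasKey(key) ? params.getBoolean(key) : fallback;
  }
}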
@@ -221,7 +221,7 @@ public class RNLlama implements LifecycleEventListener {
     tasks.put(task, "stopCompletion-" + contextId);
   }

-  public void tokenize(double id, final String text, final Promise promise) {
+  public void tokenizeAsync(double id, final String text, final Promise promise) {
     final int contextId = (int) id;
     AsyncTask task = new AsyncTask<Void, Void, WritableMap>() {
       private Exception exception;
@@ -253,6 +253,16 @@ public class RNLlama implements LifecycleEventListener {
     tasks.put(task, "tokenize-" + contextId);
   }

+  public WritableMap tokenizeSync(double id, final String text) {
+    int contextId = (int) id;
+    LlamaContext context = contexts.get(contextId);
+    if (context == null) {
+      return Arguments.createMap();
+    }
+    return context.tokenize(text);
+  }
+
+
   public void detokenize(double id, final ReadableArray tokens, final Promise promise) {
     final int contextId = (int) id;
     AsyncTask task = new AsyncTask<Void, Void, String>() {
@@ -129,6 +129,7 @@ Java_com_rnllama_LlamaContext_initContext(
   jint n_gpu_layers, // TODO: Support this
   jboolean use_mlock,
   jboolean use_mmap,
+  jboolean vocab_only,
   jstring lora_str,
   jfloat lora_scaled,
   jstring lora_base_str,
@@ -139,6 +140,11 @@ Java_com_rnllama_LlamaContext_initContext(

   gpt_params defaultParams;

+  defaultParams.vocab_only = vocab_only;
+  if(vocab_only) {
+    defaultParams.warmup = false;
+  }
+
   const char *model_path_chars = env->GetStringUTFChars(model_path_str, nullptr);
   defaultParams.model = model_path_chars;

@@ -6,6 +6,7 @@ import com.facebook.react.bridge.Promise;
 import com.facebook.react.bridge.ReactApplicationContext;
 import com.facebook.react.bridge.ReactMethod;
 import com.facebook.react.bridge.ReadableMap;
+import com.facebook.react.bridge.WritableMap;
 import com.facebook.react.bridge.ReadableArray;
 import com.facebook.react.module.annotations.ReactModule;

@@ -63,8 +64,13 @@ public class RNLlamaModule extends NativeRNLlamaSpec {
   }

   @ReactMethod
-  public void tokenize(double id, final String text, final Promise promise) {
-    rnllama.tokenize(id, text, promise);
+  public void tokenizeAsync(double id, final String text, final Promise promise) {
+    rnllama.tokenizeAsync(id, text, promise);
+  }
+
+  @ReactMethod(isBlockingSynchronousMethod=true)
+  public WritableMap tokenizeSync(double id, final String text) {
+    return rnllama.tokenizeSync(id, text);
   }

   @ReactMethod
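
This hunk (and the matching change to the old-architecture module below) renames the Promise-based tokenize to tokenizeAsync and adds tokenizeSync, annotated with isBlockingSynchronousMethod=true so the bridge hands the WritableMap directly back to the JavaScript caller instead of resolving a Promise. A minimal sketch of such a synchronous bridge method, assuming a plain ReactContextBaseJavaModule; the module and method names are hypothetical and not part of the package:

import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableMap;

// Hypothetical example module: a method marked isBlockingSynchronousMethod=true
// is invoked synchronously from the JavaScript thread and returns its result
// as a value rather than through a Promise.
public class EchoModule extends ReactContextBaseJavaModule {
  public EchoModule(ReactApplicationContext reactContext) {
    super(reactContext);
  }

  @Override
  public String getName() {
    return "Echo";
  }

  @ReactMethod(isBlockingSynchronousMethod = true)
  public WritableMap echoSync(String text) {
    WritableMap result = Arguments.createMap();
    result.putString("text", text);
    return result;
  }
}

Because a synchronous method blocks the JavaScript thread while it runs, it is best reserved for cheap calls such as tokenizing short strings; the Promise-based tokenizeAsync remains the safer choice for long inputs.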
@@ -7,6 +7,7 @@ import com.facebook.react.bridge.ReactApplicationContext;
 import com.facebook.react.bridge.ReactContextBaseJavaModule;
 import com.facebook.react.bridge.ReactMethod;
 import com.facebook.react.bridge.ReadableMap;
+import com.facebook.react.bridge.WritableMap;
 import com.facebook.react.bridge.ReadableArray;
 import com.facebook.react.module.annotations.ReactModule;

@@ -64,12 +65,17 @@ public class RNLlamaModule extends ReactContextBaseJavaModule {
   }

   @ReactMethod
-  public void tokenize(double id, final String text, final Promise promise) {
-    rnllama.tokenize(id, text, promise);
+  public void tokenizeAsync(double id, final String text, final Promise promise) {
+    rnllama.tokenizeAsync(id, text, promise);
+  }
+
+  @ReactMethod(isBlockingSynchronousMethod=true)
+  public WritableMap tokenizeSync(double id, final String text) {
+    return rnllama.tokenizeSync(id, text);
   }

   @ReactMethod
-  public void detokenize(double id, final ReadableArray tokens, final Promise promise) {
+  public void detokenize(double id, final ReadableArray tokens, final Promise promise) {
     rnllama.detokenize(id, tokens, promise);
   }