llama-cpp-capacitor 0.0.1 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/CMakeLists.txt +33 -31
- package/android/src/main/java/ai/annadata/plugin/capacitor/LlamaCppPlugin.java +124 -25
- package/dist/esm/index.js +1 -11
- package/dist/esm/index.js.map +1 -1
- package/dist/plugin.cjs.js +2 -12
- package/dist/plugin.cjs.js.map +1 -1
- package/dist/plugin.js +3 -14
- package/dist/plugin.js.map +1 -1
- package/package.json +4 -3
- package/types/llama-cpp-capacitor.d.ts +441 -0
package/android/src/main/CMakeLists.txt
CHANGED

```diff
@@ -109,40 +109,42 @@ endfunction()
 
 # Build for different architectures
 if (ANDROID_ABI STREQUAL "arm64-v8a")
-    build_library(llama-cpp-arm64-v8a "
+    build_library(llama-cpp-arm64-v8a "arm" "-march=armv8-a")
 elseif (ANDROID_ABI STREQUAL "armeabi-v7a")
     build_library(llama-cpp-armeabi-v7a "arm" "-march=armv7-a -mfpu=neon")
 elseif (ANDROID_ABI STREQUAL "x86")
-    build_library(llama-cpp-x86 "
+    build_library(llama-cpp-x86 "x86" "-march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32")
 elseif (ANDROID_ABI STREQUAL "x86_64")
-    build_library(llama-cpp-x86_64 "
+    build_library(llama-cpp-x86_64 "x86" "-march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel")
 endif()
 
-# Set compile definitions
-... (old lines 122-148 not shown in this diff view)
+# Set compile definitions for the target that was actually built
+if (ANDROID_ABI STREQUAL "arm64-v8a")
+    target_compile_definitions(llama-cpp-arm64-v8a PRIVATE
+        -DNDEBUG
+        -DO3
+        -DLM_GGML_USE_CPU
+        -DLM_GGML_CPU_GENERIC
+    )
+elseif (ANDROID_ABI STREQUAL "armeabi-v7a")
+    target_compile_definitions(llama-cpp-armeabi-v7a PRIVATE
+        -DNDEBUG
+        -DO3
+        -DLM_GGML_USE_CPU
+        -DLM_GGML_CPU_GENERIC
+    )
+elseif (ANDROID_ABI STREQUAL "x86")
+    target_compile_definitions(llama-cpp-x86 PRIVATE
+        -DNDEBUG
+        -DO3
+        -DLM_GGML_USE_CPU
+        -DLM_GGML_CPU_GENERIC
+    )
+elseif (ANDROID_ABI STREQUAL "x86_64")
+    target_compile_definitions(llama-cpp-x86_64 PRIVATE
+        -DNDEBUG
+        -DO3
+        -DLM_GGML_USE_CPU
+        -DLM_GGML_CPU_GENERIC
+    )
+endif()
```
package/android/src/main/java/ai/annadata/plugin/capacitor/LlamaCppPlugin.java
CHANGED

```diff
@@ -1,10 +1,13 @@
 package ai.annadata.plugin.capacitor;
 
 import com.getcapacitor.JSObject;
+import com.getcapacitor.JSArray;
 import com.getcapacitor.Plugin;
 import com.getcapacitor.PluginCall;
 import com.getcapacitor.PluginMethod;
 import com.getcapacitor.annotation.CapacitorPlugin;
+import java.util.Map;
+import org.json.JSONException;
 
 @CapacitorPlugin(name = "LlamaCpp")
 public class LlamaCppPlugin extends Plugin {
```
```diff
@@ -40,12 +43,27 @@ public class LlamaCppPlugin extends Plugin {
     @PluginMethod
     public void modelInfo(PluginCall call) {
         String path = call.getString("path", "");
-        ...
-        ...
+        JSArray skipArray = call.getArray("skip");
+        String[] skip = new String[0];
+        if (skipArray != null) {
+            skip = new String[skipArray.length()];
+            for (int i = 0; i < skipArray.length(); i++) {
+                try {
+                    skip[i] = skipArray.getString(i);
+                } catch (JSONException e) {
+                    skip[i] = "";
+                }
+            }
+        }
 
         implementation.modelInfo(path, skip, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -59,7 +77,12 @@ public class LlamaCppPlugin extends Plugin {
 
         implementation.initContext(contextId, params, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -101,7 +124,12 @@ public class LlamaCppPlugin extends Plugin {
 
         implementation.getFormattedChat(contextId, messages, chatTemplate, params, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -115,7 +143,12 @@ public class LlamaCppPlugin extends Plugin {
 
         implementation.completion(contextId, params, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -140,11 +173,16 @@ public class LlamaCppPlugin extends Plugin {
     @PluginMethod
     public void loadSession(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
-        String
+        String path = call.getString("path", "");
 
-        implementation.loadSession(contextId,
+        implementation.loadSession(contextId, path, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -154,10 +192,10 @@ public class LlamaCppPlugin extends Plugin {
     @PluginMethod
     public void saveSession(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
-        String
+        String path = call.getString("path", "");
         int size = call.getInt("size", -1);
 
-        implementation.saveSession(contextId,
+        implementation.saveSession(contextId, path, size, result -> {
             if (result.isSuccess()) {
                 JSObject ret = new JSObject();
                 ret.put("tokensSaved", result.getData());
```
```diff
@@ -174,12 +212,27 @@ public class LlamaCppPlugin extends Plugin {
     public void tokenize(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
         String text = call.getString("text", "");
-        ...
-        ...
+        JSArray imagePathsArray = call.getArray("imagePaths");
+        String[] imagePaths = new String[0];
+        if (imagePathsArray != null) {
+            imagePaths = new String[imagePathsArray.length()];
+            for (int i = 0; i < imagePathsArray.length(); i++) {
+                try {
+                    imagePaths[i] = imagePathsArray.getString(i);
+                } catch (JSONException e) {
+                    imagePaths[i] = "";
+                }
+            }
+        }
 
         implementation.tokenize(contextId, text, imagePaths, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -189,8 +242,18 @@ public class LlamaCppPlugin extends Plugin {
     @PluginMethod
     public void detokenize(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
-        ...
-        ...
+        JSArray tokensArray = call.getArray("tokens");
+        Integer[] tokens = new Integer[0];
+        if (tokensArray != null) {
+            tokens = new Integer[tokensArray.length()];
+            for (int i = 0; i < tokensArray.length(); i++) {
+                try {
+                    tokens[i] = tokensArray.getInt(i);
+                } catch (JSONException e) {
+                    tokens[i] = 0;
+                }
+            }
+        }
 
         implementation.detokenize(contextId, tokens, result -> {
             if (result.isSuccess()) {
```
```diff
@@ -213,7 +276,12 @@ public class LlamaCppPlugin extends Plugin {
 
         implementation.embedding(contextId, text, params, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject jsResult = new JSObject();
+                Map<String, Object> data = result.getData();
+                for (Map.Entry<String, Object> entry : data.entrySet()) {
+                    jsResult.put(entry.getKey(), entry.getValue());
+                }
+                call.resolve(jsResult);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -224,8 +292,18 @@ public class LlamaCppPlugin extends Plugin {
     public void rerank(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
         String query = call.getString("query", "");
-        ...
-        ...
+        JSArray documentsArray = call.getArray("documents");
+        String[] documents = new String[0];
+        if (documentsArray != null) {
+            documents = new String[documentsArray.length()];
+            for (int i = 0; i < documentsArray.length(); i++) {
+                try {
+                    documents[i] = documentsArray.getString(i);
+                } catch (JSONException e) {
+                    documents[i] = "";
+                }
+            }
+        }
         JSObject params = call.getObject("params");
 
         implementation.rerank(contextId, query, documents, params, result -> {
```
```diff
@@ -265,8 +343,15 @@ public class LlamaCppPlugin extends Plugin {
     @PluginMethod
     public void applyLoraAdapters(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
-        ...
-        ...
+        JSArray loraAdaptersArray = call.getArray("loraAdapters");
+        JSObject[] loraAdapters = new JSObject[0];
+        if (loraAdaptersArray != null) {
+            loraAdapters = new JSObject[loraAdaptersArray.length()];
+            for (int i = 0; i < loraAdaptersArray.length(); i++) {
+                // For now, create empty JSObjects since the exact method is unclear
+                loraAdapters[i] = new JSObject();
+            }
+        }
 
         implementation.applyLoraAdapters(contextId, loraAdapters, result -> {
             if (result.isSuccess()) {
```
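The comment in this hunk flags the adapter conversion as unresolved: each entry is left as an empty JSObject. One possible refinement is sketched below; it assumes the Capacitor version the plugin targets exposes JSObject.fromJSONObject and that each "loraAdapters" element is a plain JSON object (the TypeScript wrapper sends { path, scaled } entries), so treat it as an illustration rather than the package's code.

```java
// Sketch only: assumes JSObject.fromJSONObject(...) exists in the targeted
// Capacitor version and that each array element is a JSON object.
JSArray loraAdaptersArray = call.getArray("loraAdapters");
JSObject[] loraAdapters = new JSObject[0];
if (loraAdaptersArray != null) {
    loraAdapters = new JSObject[loraAdaptersArray.length()];
    for (int i = 0; i < loraAdaptersArray.length(); i++) {
        try {
            // JSArray extends org.json.JSONArray, so an element can be read
            // as a JSONObject and wrapped into a Capacitor JSObject.
            loraAdapters[i] = JSObject.fromJSONObject(loraAdaptersArray.getJSONObject(i));
        } catch (JSONException e) {
            loraAdapters[i] = new JSObject(); // same fallback as the shipped code
        }
    }
}
```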
```diff
@@ -346,7 +431,9 @@ public class LlamaCppPlugin extends Plugin {
 
         implementation.getMultimodalSupport(contextId, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject ret = new JSObject();
+                ret.put("support", result.getData());
+                call.resolve(ret);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -409,7 +496,9 @@ public class LlamaCppPlugin extends Plugin {
 
         implementation.getFormattedAudioCompletion(contextId, speakerJsonStr, textToSpeak, result -> {
             if (result.isSuccess()) {
-                ...
+                JSObject ret = new JSObject();
+                ret.put("completion", result.getData());
+                call.resolve(ret);
             } else {
                 call.reject(result.getError().getMessage());
             }
```
```diff
@@ -435,8 +524,18 @@ public class LlamaCppPlugin extends Plugin {
     @PluginMethod
     public void decodeAudioTokens(PluginCall call) {
         int contextId = call.getInt("contextId", 0);
-        ...
-        ...
+        JSArray tokensArray = call.getArray("tokens");
+        Integer[] tokens = new Integer[0];
+        if (tokensArray != null) {
+            tokens = new Integer[tokensArray.length()];
+            for (int i = 0; i < tokensArray.length(); i++) {
+                try {
+                    tokens[i] = tokensArray.getInt(i);
+                } catch (JSONException e) {
+                    tokens[i] = 0;
+                }
+            }
+        }
 
         implementation.decodeAudioTokens(contextId, tokens, result -> {
             if (result.isSuccess()) {
```
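Taken together, the Java hunks above repeat the same two conversions in almost every @PluginMethod: a JSArray argument is copied element by element into a Java array, and the Map<String, Object> returned by the implementation is copied into a JSObject before call.resolve(). A minimal sketch of how that boilerplate could be factored out follows; the JSBridgeUtils class and its method names are hypothetical and not part of the package, and the bodies simply mirror the calls already shown in the diff.

```java
// Hypothetical helpers (not in the package) that capture the conversion
// pattern repeated across the 0.0.3 plugin methods.
import com.getcapacitor.JSArray;
import com.getcapacitor.JSObject;
import java.util.Map;
import org.json.JSONException;

final class JSBridgeUtils {

    private JSBridgeUtils() {}

    // JSArray of strings -> String[]; malformed entries fall back to "",
    // mirroring the try/catch used in modelInfo, tokenize and rerank.
    static String[] toStringArray(JSArray array) {
        if (array == null) return new String[0];
        String[] out = new String[array.length()];
        for (int i = 0; i < array.length(); i++) {
            try {
                out[i] = array.getString(i);
            } catch (JSONException e) {
                out[i] = "";
            }
        }
        return out;
    }

    // Implementation result data -> JSObject, as built before each call.resolve().
    static JSObject toJSObject(Map<String, Object> data) {
        JSObject obj = new JSObject();
        for (Map.Entry<String, Object> entry : data.entrySet()) {
            obj.put(entry.getKey(), entry.getValue());
        }
        return obj;
    }
}
```

With helpers like these, each success branch would reduce to call.resolve(JSBridgeUtils.toJSObject(result.getData())), and an analogous toIntegerArray would cover the token arrays in detokenize and decodeAudioTokens.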
package/dist/esm/index.js
CHANGED
```diff
@@ -1,15 +1,5 @@
-var __rest = (this && this.__rest) || function (s, e) {
-    var t = {};
-    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
-        t[p] = s[p];
-    if (s != null && typeof Object.getOwnPropertySymbols === "function")
-        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
-            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
-                t[p[i]] = s[p[i]];
-        }
-    return t;
-};
 var _a, _b, _c;
+import { __rest } from "tslib";
 import { registerPlugin } from '@capacitor/core';
 // Constants
 export const LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER = '<__media__>';
```
package/dist/esm/index.js.map
CHANGED

@@ -1 +1 @@
(The single-line generated source map was replaced. Both the old and new versions are one minified JSON line containing the "mappings" string and the embedded sourcesContent for src/index.ts; the regenerated map no longer covers the inlined __rest helper, which index.js now imports from tslib.)
,KAAK,CAAC,mBAAmB;QACvB,OAAO,MAAM,QAAQ,CAAC,mBAAmB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;IACpE,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,oBAAoB;QAIxB,OAAO,MAAM,QAAQ,CAAC,oBAAoB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;IACrE,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,iBAAiB;QACrB,OAAO,MAAM,QAAQ,CAAC,iBAAiB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAsC;QAC7E,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;YAAE,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACrD,OAAO,MAAM,QAAQ,CAAC,WAAW,CAAC;YAChC,SAAS,EAAE,IAAI,CAAC,EAAE;YAClB,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE;SAClC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,gBAAgB;QACpB,OAAO,MAAM,QAAQ,CAAC,gBAAgB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;IACjE,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,2BAA2B,CAC/B,OAAsB,EACtB,WAAmB;QAKnB,OAAO,MAAM,QAAQ,CAAC,2BAA2B,CAAC;YAChD,SAAS,EAAE,IAAI,CAAC,EAAE;YAClB,cAAc,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YACtD,WAAW;SACZ,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,6BAA6B,CACjC,WAAmB;QAEnB,OAAO,MAAM,QAAQ,CAAC,6BAA6B,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,CAAC;IAC3F,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,iBAAiB,CAAC,MAAgB;QACtC,OAAO,MAAM,QAAQ,CAAC,iBAAiB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAC;IAC1E,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,cAAc;QAClB,OAAO,MAAM,QAAQ,CAAC,cAAc,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,KAAK,CAAC,OAAO;QACX,OAAO,QAAQ,CAAC,cAAc,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC;CACF;AAED,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,OAAgB;IACpD,OAAO,QAAQ,CAAC,eAAe,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;AAC/C,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,QAA+C;IAE/C,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC5B,OAAO;QACL,MAAM,EAAE,GAAG,EAAE;YACX,YAAY,CAAC,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;QACzD,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,KAAa;IACjD,OAAO,QAAQ,CAAC,eAAe,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;AAC7C,CAAC;AAED,IAAI,gBAAgB,GAAG,CAAC,CAAC;AACzB,MAAM,eAAe,GAAG,GAAG,EAAE,CAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,KAAK,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,MAAM,CAAC,CAAC;AAE3E,MAAM,aAAa,GAAG;IACpB,eAAe;IACf,uBAAuB;IACvB,2BAA2B;IAC3B,uBAAuB;IACvB,uBAAuB;CACxB,CAAC;AAEF,MAAM,CAAC,KAAK,UAAU,kBAAkB,CAAC,KAAa;IACpD,IAAI,IAAI,GAAG,KAAK,CAAC;IACjB,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;QAAE,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IACrD,OAAO,QAAQ,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,aAAa,EAAE,CAAC,CAAC;AAC3D,CAAC;AAED,MAAM,WAAW,GAAG;IAClB,iCAAiC;IACjC,IAAI,EAAE,CAAC;IACP,IAAI,EAAE,CAAC;IACP,GAAG,EAAE,CAAC;IACN,IAAI,EAAE,CAAC;IACP,IAAI,EAAE,CAAC;CACR,CAAC;AAEF,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,EAOgB,EAChB,UAAuC;QARvC,EACE,KAAK,EACL,cAAc,EAAE,YAAY,EAC5B,YAAY,EAAE,WAAW,EACzB,IAAI,EACJ,SAAS,EAAE,QAAQ,OAEL,EADX,IAAI,cANT,gEAOC,CADQ;IAIT,IAAI,IAAI,GAAG,KAAK,CAAC;IACjB,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;QAAE,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAErD,IAAI,QAAQ,GAAG,IAAI,CAAC;IACpB,IAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,SAAS;QAAG,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAElE,IAAI,YAAY,GAA6C,EAAE,CAAC;IAChE,IAAI,QAAQ;QACV,YAAY,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;YAClC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YACrC,MAAM,EAAE,CAAC,CAAC,MAAM;SACjB,CAAC,CAAC,CAAC;IAEN,MAAM,SAAS,GAAG,gBAAgB,GAAG,eAAe,EAAE,CAAC;IACvD,gBAAgB,IAAI,CAAC,CAAC;IAEtB,IAAI,sBAAsB,GAAQ,IAAI,CAAC;IACvC,IAAI,UAAU,EAAE;QACd,sBAAsB,GAAG,QAAQ,CAAC,WAAW,CAC3C,8BAA8B,EAC9
B,CAAC,GAA4C,EAAE,EAAE;YAC/C,IAAI,GAAG,CAAC,SAAS,KAAK,SAAS;gBAAE,OAAO;YACxC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;QAC3B,CAAC,CACF,CAAC;KACH;IAED,MAAM,QAAQ,GAAG,WAAW,CAAC,WAAuC,CAAC,CAAC;IAEtE,IAAI,IAAI,CAAC,YAAY,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;QACrE,OAAO,CAAC,IAAI,CAAC,+CAA+C,IAAI,CAAC,YAAY,uBAAuB,CAAC,CAAC;QACtG,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;IACD,IAAI,IAAI,CAAC,YAAY,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;QACrE,OAAO,CAAC,IAAI,CAAC,+CAA+C,IAAI,CAAC,YAAY,uBAAuB,CAAC,CAAC;QACtG,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;IAED,MAAM,EACJ,GAAG,EACH,WAAW,EACX,KAAK,EAAE,YAAY,EACnB,UAAU,GACX,GAAG,MAAM,QAAQ,CAAC,WAAW,CAAC;QAC7B,SAAS;QACT,MAAM,kBACJ,KAAK,EAAE,IAAI,EACX,cAAc,EAAE,CAAC,CAAC,YAAY,EAC9B,qBAAqB,EAAE,CAAC,CAAC,UAAU,EACnC,YAAY,EAAE,QAAQ,EACtB,IAAI,EAAE,QAAQ,EACd,SAAS,EAAE,YAAY,IACpB,IAAI,CACR;KACF,CAAC,CAAC,KAAK,CAAC,CAAC,GAAQ,EAAE,EAAE;QACpB,sBAAsB,aAAtB,sBAAsB,uBAAtB,sBAAsB,CAAE,MAAM,GAAG;QACjC,MAAM,GAAG,CAAC;IACZ,CAAC,CAAC,CAAC;IACH,sBAAsB,aAAtB,sBAAsB,uBAAtB,sBAAsB,CAAE,MAAM,GAAG;IACjC,OAAO,IAAI,YAAY,CAAC;QACtB,SAAS;QACT,GAAG;QACH,WAAW;QACX,KAAK,EAAE,YAAY;QACnB,UAAU;KACX,CAAC,CAAC;AACL,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,eAAe;IACnC,OAAO,QAAQ,CAAC,kBAAkB,EAAE,CAAC;AACvC,CAAC;AAED,MAAM,CAAC,MAAM,SAAS,GAAG;IACvB,MAAM,EAAE,OAAO;IACf,MAAM,EAAE,qBAAqB;CAC9B,CAAC;AAEF,yCAAyC;AACzC,OAAO,EAAE,QAAQ,EAAE,CAAC","sourcesContent":["import { registerPlugin } from '@capacitor/core';\nimport type {\n NativeContextParams,\n NativeLlamaContext,\n NativeCompletionParams,\n NativeCompletionTokenProb,\n NativeCompletionResult,\n NativeTokenizeResult,\n NativeEmbeddingResult,\n NativeSessionLoadResult,\n NativeEmbeddingParams,\n NativeRerankParams,\n NativeRerankResult,\n NativeCompletionTokenProbItem,\n NativeCompletionResultTimings,\n JinjaFormattedChatResult,\n FormattedChatResult,\n NativeImageProcessingResult,\n NativeLlamaChatMessage,\n LlamaCppMessagePart,\n LlamaCppOAICompatibleMessage,\n ContextParams,\n EmbeddingParams,\n RerankParams,\n RerankResult,\n CompletionResponseFormat,\n CompletionParams,\n BenchResult,\n LlamaCppPlugin,\n} from './definitions';\n\n// Constants\nexport const LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER = '<__media__>';\n\n// Event names\nconst EVENT_ON_INIT_CONTEXT_PROGRESS = '@LlamaCpp_onInitContextProgress';\nconst EVENT_ON_TOKEN = '@LlamaCpp_onToken';\nconst EVENT_ON_NATIVE_LOG = '@LlamaCpp_onNativeLog';\n\n// Register the plugin\nconst LlamaCpp = registerPlugin<LlamaCppPlugin>('LlamaCpp');\n\n// Log listeners management\nconst logListeners: Array<(level: string, text: string) => void> = [];\n\n// Set up native log listener\nLlamaCpp.addListener(EVENT_ON_NATIVE_LOG, (evt: { level: string; text: string }) => {\n logListeners.forEach((listener) => listener(evt.level, evt.text));\n});\n\n// Trigger unset to use default log callback\nLlamaCpp?.toggleNativeLog?.({ enabled: false })?.catch?.(() => {});\n\n// High-level types for the plugin interface\nexport type RNLlamaMessagePart = LlamaCppMessagePart;\nexport type RNLlamaOAICompatibleMessage = LlamaCppOAICompatibleMessage;\n\n// Re-export all types from definitions\nexport type {\n NativeContextParams,\n NativeLlamaContext,\n NativeCompletionParams,\n NativeCompletionTokenProb,\n NativeCompletionResult,\n NativeTokenizeResult,\n NativeEmbeddingResult,\n NativeSessionLoadResult,\n NativeEmbeddingParams,\n NativeRerankParams,\n NativeRerankResult,\n NativeCompletionTokenProbItem,\n NativeCompletionResultTimings,\n FormattedChatResult,\n JinjaFormattedChatResult,\n NativeImageProcessingResult,\n ContextParams,\n 
EmbeddingParams,\n RerankParams,\n RerankResult,\n CompletionResponseFormat,\n CompletionParams,\n BenchResult,\n};\n\nexport const RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER = LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER;\n\nexport type ToolCall = {\n type: 'function';\n id?: string;\n function: {\n name: string;\n arguments: string; // JSON string\n };\n};\n\nexport type TokenData = {\n token: string;\n completion_probabilities?: Array<NativeCompletionTokenProb>;\n // Parsed content from accumulated text\n content?: string;\n reasoning_content?: string;\n tool_calls?: Array<ToolCall>;\n accumulated_text?: string;\n};\n\ntype TokenNativeEvent = {\n contextId: number;\n tokenResult: TokenData;\n};\n\nconst validCacheTypes = [\n 'f16',\n 'f32',\n 'bf16',\n 'q8_0',\n 'q4_0',\n 'q4_1',\n 'iq4_nl',\n 'q5_0',\n 'q5_1',\n];\n\nconst getJsonSchema = (responseFormat?: CompletionResponseFormat) => {\n if (responseFormat?.type === 'json_schema') {\n return responseFormat.json_schema?.schema;\n }\n if (responseFormat?.type === 'json_object') {\n return responseFormat.schema || {};\n }\n return null;\n};\n\nexport class LlamaContext {\n id: number;\n gpu: boolean = false;\n reasonNoGPU: string = '';\n model: NativeLlamaContext['model'];\n\n constructor({ contextId, gpu, reasonNoGPU, model }: NativeLlamaContext) {\n this.id = contextId;\n this.gpu = gpu;\n this.reasonNoGPU = reasonNoGPU;\n this.model = model;\n }\n\n /**\n * Load cached prompt & completion state from a file.\n */\n async loadSession(filepath: string): Promise<NativeSessionLoadResult> {\n let path = filepath;\n if (path.startsWith('file://')) path = path.slice(7);\n return LlamaCpp.loadSession({ contextId: this.id, filepath: path });\n }\n\n /**\n * Save current cached prompt & completion state to a file.\n */\n async saveSession(\n filepath: string,\n options?: { tokenSize: number },\n ): Promise<number> {\n return LlamaCpp.saveSession({ \n contextId: this.id, \n filepath, \n size: options?.tokenSize || -1 \n });\n }\n\n isLlamaChatSupported(): boolean {\n return !!this.model.chatTemplates.llamaChat;\n }\n\n isJinjaSupported(): boolean {\n const { minja } = this.model.chatTemplates;\n return !!minja?.toolUse || !!minja?.default;\n }\n\n async getFormattedChat(\n messages: RNLlamaOAICompatibleMessage[],\n template?: string | null,\n params?: {\n jinja?: boolean;\n response_format?: CompletionResponseFormat;\n tools?: object;\n parallel_tool_calls?: object;\n tool_choice?: string;\n enable_thinking?: boolean;\n add_generation_prompt?: boolean;\n now?: string | number;\n chat_template_kwargs?: Record<string, string>;\n },\n ): Promise<FormattedChatResult | JinjaFormattedChatResult> {\n const mediaPaths: string[] = [];\n const chat = messages.map((msg) => {\n if (Array.isArray(msg.content)) {\n const content = msg.content.map((part) => {\n // Handle multimodal content\n if (part.type === 'image_url') {\n let path = part.image_url?.url || '';\n if (path?.startsWith('file://')) path = path.slice(7);\n mediaPaths.push(path);\n return {\n type: 'text',\n text: RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER,\n };\n } else if (part.type === 'input_audio') {\n const { input_audio: audio } = part;\n if (!audio) throw new Error('input_audio is required');\n\n const { format } = audio;\n if (format != 'wav' && format != 'mp3') {\n throw new Error(`Unsupported audio format: ${format}`);\n }\n if (audio.url) {\n const path = audio.url.replace(/file:\\/\\//, '');\n mediaPaths.push(path);\n } else if (audio.data) {\n mediaPaths.push(audio.data);\n }\n return {\n type: 'text',\n text: 
RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER,\n };\n }\n return part;\n });\n\n return {\n ...msg,\n content,\n };\n }\n return msg;\n }) as NativeLlamaChatMessage[];\n\n const useJinja = this.isJinjaSupported() && params?.jinja;\n let tmpl;\n if (template) tmpl = template; // Force replace if provided\n const jsonSchema = getJsonSchema(params?.response_format);\n\n const result = await LlamaCpp.getFormattedChat({\n contextId: this.id,\n messages: JSON.stringify(chat),\n chatTemplate: tmpl,\n params: {\n jinja: useJinja,\n json_schema: jsonSchema ? JSON.stringify(jsonSchema) : undefined,\n tools: params?.tools ? JSON.stringify(params.tools) : undefined,\n parallel_tool_calls: params?.parallel_tool_calls\n ? JSON.stringify(params.parallel_tool_calls)\n : undefined,\n tool_choice: params?.tool_choice,\n enable_thinking: params?.enable_thinking ?? true,\n add_generation_prompt: params?.add_generation_prompt,\n now: typeof params?.now === 'number' ? params.now.toString() : params?.now,\n chat_template_kwargs: params?.chat_template_kwargs ? JSON.stringify(\n Object.entries(params.chat_template_kwargs).reduce((acc, [key, value]) => {\n acc[key] = JSON.stringify(value); // Each value is a stringified JSON object\n return acc;\n }, {} as Record<string, any>)\n ) : undefined,\n },\n });\n\n if (!useJinja) {\n return {\n type: 'llama-chat',\n prompt: result as string,\n has_media: mediaPaths.length > 0,\n media_paths: mediaPaths,\n };\n }\n const jinjaResult = result as JinjaFormattedChatResult;\n jinjaResult.type = 'jinja';\n jinjaResult.has_media = mediaPaths.length > 0;\n jinjaResult.media_paths = mediaPaths;\n return jinjaResult;\n }\n\n /**\n * Generate a completion based on the provided parameters\n * @param params Completion parameters including prompt or messages\n * @param callback Optional callback for token-by-token streaming\n * @returns Promise resolving to the completion result\n *\n * Note: For multimodal support, you can include an media_paths parameter.\n * This will process the images and add them to the context before generating text.\n * Multimodal support must be enabled via initMultimodal() first.\n */\n async completion(\n params: CompletionParams,\n callback?: (data: TokenData) => void,\n ): Promise<NativeCompletionResult> {\n const nativeParams = {\n ...params,\n prompt: params.prompt || '',\n emit_partial_completion: !!callback,\n };\n\n if (params.messages) {\n const formattedResult = await this.getFormattedChat(\n params.messages,\n params.chat_template || params.chatTemplate,\n {\n jinja: params.jinja,\n tools: params.tools,\n parallel_tool_calls: params.parallel_tool_calls,\n tool_choice: params.tool_choice,\n enable_thinking: params.enable_thinking,\n add_generation_prompt: params.add_generation_prompt,\n now: params.now,\n chat_template_kwargs: params.chat_template_kwargs,\n },\n );\n if (formattedResult.type === 'jinja') {\n const jinjaResult = formattedResult as JinjaFormattedChatResult;\n\n nativeParams.prompt = jinjaResult.prompt || '';\n if (typeof jinjaResult.chat_format === 'number')\n nativeParams.chat_format = jinjaResult.chat_format;\n if (jinjaResult.grammar) nativeParams.grammar = jinjaResult.grammar;\n if (typeof jinjaResult.grammar_lazy === 'boolean')\n nativeParams.grammar_lazy = jinjaResult.grammar_lazy;\n if (jinjaResult.grammar_triggers)\n nativeParams.grammar_triggers = jinjaResult.grammar_triggers;\n if (jinjaResult.preserved_tokens)\n nativeParams.preserved_tokens = jinjaResult.preserved_tokens;\n if (jinjaResult.additional_stops) {\n if (!nativeParams.stop) 
nativeParams.stop = [];\n nativeParams.stop.push(...jinjaResult.additional_stops);\n }\n if (jinjaResult.has_media) {\n nativeParams.media_paths = jinjaResult.media_paths;\n }\n } else if (formattedResult.type === 'llama-chat') {\n const llamaChatResult = formattedResult as FormattedChatResult;\n nativeParams.prompt = llamaChatResult.prompt || '';\n if (llamaChatResult.has_media) {\n nativeParams.media_paths = llamaChatResult.media_paths;\n }\n }\n } else {\n nativeParams.prompt = params.prompt || '';\n }\n\n // If media_paths were explicitly provided or extracted from messages, use them\n if (!nativeParams.media_paths && params.media_paths) {\n nativeParams.media_paths = params.media_paths;\n }\n\n if (nativeParams.response_format && !nativeParams.grammar) {\n const jsonSchema = getJsonSchema(params.response_format);\n if (jsonSchema) nativeParams.json_schema = JSON.stringify(jsonSchema);\n }\n\n let tokenListener: any =\n callback &&\n LlamaCpp.addListener(EVENT_ON_TOKEN, (evt: TokenNativeEvent) => {\n const { contextId, tokenResult } = evt;\n if (contextId !== this.id) return;\n callback(tokenResult);\n });\n\n if (!nativeParams.prompt) throw new Error('Prompt is required');\n\n const promise = LlamaCpp.completion({ contextId: this.id, params: nativeParams });\n return promise\n .then((completionResult) => {\n tokenListener?.remove();\n tokenListener = null;\n return completionResult;\n })\n .catch((err: any) => {\n tokenListener?.remove();\n tokenListener = null;\n throw err;\n });\n }\n\n stopCompletion(): Promise<void> {\n return LlamaCpp.stopCompletion({ contextId: this.id });\n }\n\n /**\n * Tokenize text or text with images\n * @param text Text to tokenize\n * @param params.media_paths Array of image paths to tokenize (if multimodal is enabled)\n * @returns Promise resolving to the tokenize result\n */\n tokenize(\n text: string,\n {\n media_paths: mediaPaths,\n }: {\n media_paths?: string[];\n } = {},\n ): Promise<NativeTokenizeResult> {\n return LlamaCpp.tokenize({ contextId: this.id, text, imagePaths: mediaPaths });\n }\n\n detokenize(tokens: number[]): Promise<string> {\n return LlamaCpp.detokenize({ contextId: this.id, tokens });\n }\n\n embedding(\n text: string,\n params?: EmbeddingParams,\n ): Promise<NativeEmbeddingResult> {\n return LlamaCpp.embedding({ contextId: this.id, text, params: params || {} });\n }\n\n /**\n * Rerank documents based on relevance to a query\n * @param query The query text to rank documents against\n * @param documents Array of document texts to rank\n * @param params Optional reranking parameters\n * @returns Promise resolving to an array of ranking results with scores and indices\n */\n async rerank(\n query: string,\n documents: string[],\n params?: RerankParams,\n ): Promise<RerankResult[]> {\n const results = await LlamaCpp.rerank({ \n contextId: this.id, \n query, \n documents, \n params: params || {} \n });\n\n // Sort by score descending and add document text if requested\n return results\n .map((result) => ({\n ...result,\n document: documents[result.index],\n }))\n .sort((a, b) => b.score - a.score);\n }\n\n async bench(\n pp: number,\n tg: number,\n pl: number,\n nr: number,\n ): Promise<BenchResult> {\n const result = await LlamaCpp.bench({ contextId: this.id, pp, tg, pl, nr });\n const [modelDesc, modelSize, modelNParams, ppAvg, ppStd, tgAvg, tgStd] =\n JSON.parse(result);\n return {\n modelDesc,\n modelSize,\n modelNParams,\n ppAvg,\n ppStd,\n tgAvg,\n tgStd,\n };\n }\n\n async applyLoraAdapters(\n loraList: Array<{ path: string; 
scaled?: number }>,\n ): Promise<void> {\n let loraAdapters: Array<{ path: string; scaled?: number }> = [];\n if (loraList)\n loraAdapters = loraList.map((l) => ({\n path: l.path.replace(/file:\\/\\//, ''),\n scaled: l.scaled,\n }));\n return LlamaCpp.applyLoraAdapters({ contextId: this.id, loraAdapters });\n }\n\n async removeLoraAdapters(): Promise<void> {\n return LlamaCpp.removeLoraAdapters({ contextId: this.id });\n }\n\n async getLoadedLoraAdapters(): Promise<\n Array<{ path: string; scaled?: number }>\n > {\n return LlamaCpp.getLoadedLoraAdapters({ contextId: this.id });\n }\n\n /**\n * Initialize multimodal support with a mmproj file\n * @param params Parameters for multimodal support\n * @param params.path Path to the multimodal projector file\n * @param params.use_gpu Whether to use GPU\n * @returns Promise resolving to true if initialization was successful\n */\n async initMultimodal({\n path,\n use_gpu: useGpu,\n }: {\n path: string;\n use_gpu?: boolean;\n }): Promise<boolean> {\n if (path.startsWith('file://')) path = path.slice(7);\n return LlamaCpp.initMultimodal({\n contextId: this.id,\n params: {\n path,\n use_gpu: useGpu ?? true,\n },\n });\n }\n\n /**\n * Check if multimodal support is enabled\n * @returns Promise resolving to true if multimodal is enabled\n */\n async isMultimodalEnabled(): Promise<boolean> {\n return await LlamaCpp.isMultimodalEnabled({ contextId: this.id });\n }\n\n /**\n * Check multimodal support\n * @returns Promise resolving to an object with vision and audio support\n */\n async getMultimodalSupport(): Promise<{\n vision: boolean;\n audio: boolean;\n }> {\n return await LlamaCpp.getMultimodalSupport({ contextId: this.id });\n }\n\n /**\n * Release multimodal support\n * @returns Promise resolving to void\n */\n async releaseMultimodal(): Promise<void> {\n return await LlamaCpp.releaseMultimodal({ contextId: this.id });\n }\n\n /**\n * Initialize TTS support with a vocoder model\n * @param params Parameters for TTS support\n * @param params.path Path to the vocoder model\n * @param params.n_batch Batch size for the vocoder model\n * @returns Promise resolving to true if initialization was successful\n */\n async initVocoder({ path, n_batch: nBatch }: { path: string; n_batch?: number }): Promise<boolean> {\n if (path.startsWith('file://')) path = path.slice(7);\n return await LlamaCpp.initVocoder({ \n contextId: this.id, \n params: { path, n_batch: nBatch } \n });\n }\n\n /**\n * Check if TTS support is enabled\n * @returns Promise resolving to true if TTS is enabled\n */\n async isVocoderEnabled(): Promise<boolean> {\n return await LlamaCpp.isVocoderEnabled({ contextId: this.id });\n }\n\n /**\n * Get a formatted audio completion prompt\n * @param speakerJsonStr JSON string representing the speaker\n * @param textToSpeak Text to speak\n * @returns Promise resolving to the formatted audio completion result with prompt and grammar\n */\n async getFormattedAudioCompletion(\n speaker: object | null,\n textToSpeak: string,\n ): Promise<{\n prompt: string;\n grammar?: string;\n }> {\n return await LlamaCpp.getFormattedAudioCompletion({\n contextId: this.id,\n speakerJsonStr: speaker ? 
JSON.stringify(speaker) : '',\n textToSpeak,\n });\n }\n\n /**\n * Get guide tokens for audio completion\n * @param textToSpeak Text to speak\n * @returns Promise resolving to the guide tokens\n */\n async getAudioCompletionGuideTokens(\n textToSpeak: string,\n ): Promise<Array<number>> {\n return await LlamaCpp.getAudioCompletionGuideTokens({ contextId: this.id, textToSpeak });\n }\n\n /**\n * Decode audio tokens\n * @param tokens Array of audio tokens\n * @returns Promise resolving to the decoded audio tokens\n */\n async decodeAudioTokens(tokens: number[]): Promise<Array<number>> {\n return await LlamaCpp.decodeAudioTokens({ contextId: this.id, tokens });\n }\n\n /**\n * Release TTS support\n * @returns Promise resolving to void\n */\n async releaseVocoder(): Promise<void> {\n return await LlamaCpp.releaseVocoder({ contextId: this.id });\n }\n\n async release(): Promise<void> {\n return LlamaCpp.releaseContext({ contextId: this.id });\n }\n}\n\nexport async function toggleNativeLog(enabled: boolean): Promise<void> {\n return LlamaCpp.toggleNativeLog({ enabled });\n}\n\nexport function addNativeLogListener(\n listener: (level: string, text: string) => void,\n): { remove: () => void } {\n logListeners.push(listener);\n return {\n remove: () => {\n logListeners.splice(logListeners.indexOf(listener), 1);\n },\n };\n}\n\nexport async function setContextLimit(limit: number): Promise<void> {\n return LlamaCpp.setContextLimit({ limit });\n}\n\nlet contextIdCounter = 0;\nconst contextIdRandom = () =>\n process.env.NODE_ENV === 'test' ? 0 : Math.floor(Math.random() * 100000);\n\nconst modelInfoSkip = [\n // Large fields\n 'tokenizer.ggml.tokens',\n 'tokenizer.ggml.token_type',\n 'tokenizer.ggml.merges',\n 'tokenizer.ggml.scores',\n];\n\nexport async function loadLlamaModelInfo(model: string): Promise<Object> {\n let path = model;\n if (path.startsWith('file://')) path = path.slice(7);\n return LlamaCpp.modelInfo({ path, skip: modelInfoSkip });\n}\n\nconst poolTypeMap = {\n // -1 is unspecified as undefined\n none: 0,\n mean: 1,\n cls: 2,\n last: 3,\n rank: 4,\n};\n\nexport async function initLlama(\n {\n model,\n is_model_asset: isModelAsset,\n pooling_type: poolingType,\n lora,\n lora_list: loraList,\n ...rest\n }: ContextParams,\n onProgress?: (progress: number) => void,\n): Promise<LlamaContext> {\n let path = model;\n if (path.startsWith('file://')) path = path.slice(7);\n\n let loraPath = lora;\n if (loraPath?.startsWith('file://')) loraPath = loraPath.slice(7);\n\n let loraAdapters: Array<{ path: string; scaled?: number }> = [];\n if (loraList)\n loraAdapters = loraList.map((l) => ({\n path: l.path.replace(/file:\\/\\//, ''),\n scaled: l.scaled,\n }));\n\n const contextId = contextIdCounter + contextIdRandom();\n contextIdCounter += 1;\n\n let removeProgressListener: any = null;\n if (onProgress) {\n removeProgressListener = LlamaCpp.addListener(\n EVENT_ON_INIT_CONTEXT_PROGRESS,\n (evt: { contextId: number; progress: number }) => {\n if (evt.contextId !== contextId) return;\n onProgress(evt.progress);\n },\n );\n }\n\n const poolType = poolTypeMap[poolingType as keyof typeof poolTypeMap];\n\n if (rest.cache_type_k && !validCacheTypes.includes(rest.cache_type_k)) {\n console.warn(`[LlamaCpp] initLlama: Invalid cache K type: ${rest.cache_type_k}, falling back to f16`);\n delete rest.cache_type_k;\n }\n if (rest.cache_type_v && !validCacheTypes.includes(rest.cache_type_v)) {\n console.warn(`[LlamaCpp] initLlama: Invalid cache V type: ${rest.cache_type_v}, falling back to f16`);\n delete 
rest.cache_type_v;\n }\n\n const {\n gpu,\n reasonNoGPU,\n model: modelDetails,\n androidLib,\n } = await LlamaCpp.initContext({\n contextId,\n params: {\n model: path,\n is_model_asset: !!isModelAsset,\n use_progress_callback: !!onProgress,\n pooling_type: poolType,\n lora: loraPath,\n lora_list: loraAdapters,\n ...rest,\n },\n }).catch((err: any) => {\n removeProgressListener?.remove();\n throw err;\n });\n removeProgressListener?.remove();\n return new LlamaContext({\n contextId,\n gpu,\n reasonNoGPU,\n model: modelDetails,\n androidLib,\n });\n}\n\nexport async function releaseAllLlama(): Promise<void> {\n return LlamaCpp.releaseAllContexts();\n}\n\nexport const BuildInfo = {\n number: '1.0.0',\n commit: 'capacitor-llama-cpp',\n};\n\n// Re-export the plugin for direct access\nexport { LlamaCpp };\n"]}
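The sourcesContent field embedded in the source map above carries the plugin's TypeScript entry point (initLlama, the LlamaContext class, native log listeners, multimodal and TTS helpers). For orientation, a minimal usage sketch of that API follows. It is not part of the diff: the model path, n_ctx, n_predict and the message shape are illustrative assumptions drawn from typical llama.cpp bindings, and it assumes the package root re-exports the module shown here.

// Hedged sketch of the API embedded above; names marked as placeholders are assumptions.
import { initLlama, addNativeLogListener } from 'llama-cpp-capacitor';

async function runCompletion(): Promise<void> {
  // Forward native llama.cpp logs to the JS console (API shown in the embedded source).
  const logSub = addNativeLogListener((level, text) => console.log(`[llama:${level}] ${text}`));

  // Load a model; the file path and n_ctx value are placeholders, not from this diff.
  const context = await initLlama(
    { model: 'file:///data/user/0/your.app/files/model.gguf', n_ctx: 2048 },
    (progress) => console.log(`loading: ${progress}`),
  );

  // Stream a chat completion token by token; n_predict and the message shape are assumed.
  const result = await context.completion(
    {
      messages: [{ role: 'user', content: 'Hello!' }],
      n_predict: 64,
    },
    (data) => console.log(data.token),
  );
  console.log('completion finished', result);

  await context.release();
  logSub.remove();
}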
package/dist/plugin.cjs.js  CHANGED
@@ -1,18 +1,8 @@
 'use strict';
 
+var tslib = require('tslib');
 var core = require('@capacitor/core');
 
-var __rest = (undefined && undefined.__rest) || function (s, e) {
-    var t = {};
-    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
-        t[p] = s[p];
-    if (s != null && typeof Object.getOwnPropertySymbols === "function")
-        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
-            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
-                t[p[i]] = s[p[i]];
-        }
-    return t;
-};
 var _a, _b, _c;
 // Constants
 const LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER = '<__media__>';
@@ -461,7 +451,7 @@ const poolTypeMap = {
     rank: 4,
 };
 async function initLlama(_a, onProgress) {
-    var { model, is_model_asset: isModelAsset, pooling_type: poolingType, lora, lora_list: loraList } = _a, rest = __rest(_a, ["model", "is_model_asset", "pooling_type", "lora", "lora_list"]);
+    var { model, is_model_asset: isModelAsset, pooling_type: poolingType, lora, lora_list: loraList } = _a, rest = tslib.__rest(_a, ["model", "is_model_asset", "pooling_type", "lora", "lora_list"]);
     let path = model;
     if (path.startsWith('file://'))
         path = path.slice(7);
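The only functional change to dist/plugin.cjs.js is that the previously inlined __rest helper is dropped in favour of require('tslib'), so tslib becomes a runtime dependency of the CommonJS bundle (presumably declared in the package.json change elsewhere in this diff). A small sketch of the equivalent behaviour follows; the object keys are illustrative, not taken from the package.

// __rest(obj, keys) copies obj's own enumerable properties while skipping the listed
// keys, i.e. the "...rest" part of object destructuring that the inlined helper used
// to implement. Assumes tslib is installed alongside the package.
import { __rest } from 'tslib';

const params = { model: 'model.gguf', lora: 'adapter.gguf', n_ctx: 2048 };
const rest = __rest(params, ['model', 'lora']);
console.log(rest); // { n_ctx: 2048 }, everything except the extracted keys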