llama-cpp-capacitor 0.0.13 → 0.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LlamaCpp.podspec +17 -17
- package/Package.swift +27 -27
- package/README.md +717 -574
- package/android/build.gradle +88 -69
- package/android/src/main/AndroidManifest.xml +2 -2
- package/android/src/main/CMakeLists-arm64.txt +131 -0
- package/android/src/main/CMakeLists-x86_64.txt +135 -0
- package/android/src/main/CMakeLists.txt +35 -52
- package/android/src/main/java/ai/annadata/plugin/capacitor/LlamaCpp.java +956 -717
- package/android/src/main/java/ai/annadata/plugin/capacitor/LlamaCppPlugin.java +710 -590
- package/android/src/main/jni-utils.h +7 -7
- package/android/src/main/jni.cpp +868 -127
- package/cpp/{rn-completion.cpp → cap-completion.cpp} +202 -24
- package/cpp/{rn-completion.h → cap-completion.h} +22 -11
- package/cpp/{rn-llama.cpp → cap-llama.cpp} +81 -27
- package/cpp/{rn-llama.h → cap-llama.h} +32 -20
- package/cpp/{rn-mtmd.hpp → cap-mtmd.hpp} +15 -15
- package/cpp/{rn-tts.cpp → cap-tts.cpp} +12 -12
- package/cpp/{rn-tts.h → cap-tts.h} +14 -14
- package/cpp/ggml-cpu/ggml-cpu-impl.h +30 -0
- package/dist/docs.json +100 -3
- package/dist/esm/definitions.d.ts +45 -2
- package/dist/esm/definitions.js.map +1 -1
- package/dist/esm/index.d.ts +22 -0
- package/dist/esm/index.js +66 -3
- package/dist/esm/index.js.map +1 -1
- package/dist/plugin.cjs.js +71 -3
- package/dist/plugin.cjs.js.map +1 -1
- package/dist/plugin.js +71 -3
- package/dist/plugin.js.map +1 -1
- package/ios/Sources/LlamaCppPlugin/LlamaCpp.swift +596 -596
- package/ios/Sources/LlamaCppPlugin/LlamaCppPlugin.swift +591 -514
- package/ios/Tests/LlamaCppPluginTests/LlamaCppPluginTests.swift +15 -15
- package/package.json +111 -110
@@ -1,717 +1,956 @@
-package ai.annadata.plugin.capacitor;
-
-import android.util.Log;
-import com.getcapacitor.JSObject;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.CompletableFuture;
[… remaining 710 removed lines of the previous LlamaCpp.java truncated …]
+package ai.annadata.plugin.capacitor;
+
+import android.util.Log;
+import com.getcapacitor.JSObject;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.List;
+import android.content.Context;
+import android.os.Environment;
+import java.util.ArrayList;
+
+// MARK: - Result Types
+class LlamaResult<T> {
+    private final T data;
+    private final LlamaError error;
+    private final boolean isSuccess;
+
+    private LlamaResult(T data, LlamaError error, boolean isSuccess) {
+        this.data = data;
+        this.error = error;
+        this.isSuccess = isSuccess;
+    }
+
+    public static <T> LlamaResult<T> success(T data) {
+        return new LlamaResult<>(data, null, true);
+    }
+
+    public static <T> LlamaResult<T> failure(LlamaError error) {
+        return new LlamaResult<>(null, error, false);
+    }
+
+    public boolean isSuccess() {
+        return isSuccess;
+    }
+
+    public T getData() {
+        return data;
+    }
+
+    public LlamaError getError() {
+        return error;
+    }
+}
+
+class LlamaError extends Exception {
+    public LlamaError(String message) {
+        super(message);
+    }
+}
+
+// MARK: - Context Management
+class LlamaContext {
+    private final int id;
+    private LlamaModel model;
+    private boolean isMultimodalEnabled = false;
+    private boolean isVocoderEnabled = false;
+    private long nativeContextId = -1;
+
+    public LlamaContext(int id) {
+        this.id = id;
+    }
+
+    public int getId() {
+        return id;
+    }
+
+    public LlamaModel getModel() {
+        return model;
+    }
+
+    public void setModel(LlamaModel model) {
+        this.model = model;
+    }
+
+    public boolean isMultimodalEnabled() {
+        return isMultimodalEnabled;
+    }
+
+    public void setMultimodalEnabled(boolean multimodalEnabled) {
+        isMultimodalEnabled = multimodalEnabled;
+    }
+
+    public boolean isVocoderEnabled() {
+        return isVocoderEnabled;
+    }
+
+    public void setVocoderEnabled(boolean vocoderEnabled) {
+        isVocoderEnabled = vocoderEnabled;
+    }
+
+    public long getNativeContextId() {
+        return nativeContextId;
+    }
+
+    public void setNativeContextId(long nativeContextId) {
+        this.nativeContextId = nativeContextId;
+    }
+}
+
+class LlamaModel {
+    private final String path;
+    private final String desc;
+    private final int size;
+    private final int nEmbd;
+    private final int nParams;
+    private final ChatTemplates chatTemplates;
+    private final Map<String, Object> metadata;
+
+    public LlamaModel(String path, String desc, int size, int nEmbd, int nParams, ChatTemplates chatTemplates, Map<String, Object> metadata) {
+        this.path = path;
+        this.desc = desc;
+        this.size = size;
+        this.nEmbd = nEmbd;
+        this.nParams = nParams;
+        this.chatTemplates = chatTemplates;
+        this.metadata = metadata;
+    }
+
+    public String getPath() {
+        return path;
+    }
+
+    public String getDesc() {
+        return desc;
+    }
+
+    public int getSize() {
+        return size;
+    }
+
+    public int getNEmbd() {
+        return nEmbd;
+    }
+
+    public int getNParams() {
+        return nParams;
+    }
+
+    public ChatTemplates getChatTemplates() {
+        return chatTemplates;
+    }
+
+    public Map<String, Object> getMetadata() {
+        return metadata;
+    }
+}
+
+class ChatTemplates {
+    private final boolean llamaChat;
+    private final MinjaTemplates minja;
+
+    public ChatTemplates(boolean llamaChat, MinjaTemplates minja) {
+        this.llamaChat = llamaChat;
+        this.minja = minja;
+    }
+
+    public boolean isLlamaChat() {
+        return llamaChat;
+    }
+
+    public MinjaTemplates getMinja() {
+        return minja;
+    }
+}
+
+class MinjaTemplates {
+    private final boolean default_;
+    private final MinjaCaps defaultCaps;
+    private final boolean toolUse;
+    private final MinjaCaps toolUseCaps;
+
+    public MinjaTemplates(boolean default_, MinjaCaps defaultCaps, boolean toolUse, MinjaCaps toolUseCaps) {
+        this.default_ = default_;
+        this.defaultCaps = defaultCaps;
+        this.toolUse = toolUse;
+        this.toolUseCaps = toolUseCaps;
+    }
+
+    public boolean isDefault() {
+        return default_;
+    }
+
+    public MinjaCaps getDefaultCaps() {
+        return defaultCaps;
+    }
+
+    public boolean isToolUse() {
+        return toolUse;
+    }
+
+    public MinjaCaps getToolUseCaps() {
+        return toolUseCaps;
+    }
+}
+
+class MinjaCaps {
+    private final boolean tools;
+    private final boolean toolCalls;
+    private final boolean toolResponses;
+    private final boolean systemRole;
+    private final boolean parallelToolCalls;
+    private final boolean toolCallId;
+
+    public MinjaCaps(boolean tools, boolean toolCalls, boolean toolResponses, boolean systemRole, boolean parallelToolCalls, boolean toolCallId) {
+        this.tools = tools;
+        this.toolCalls = toolCalls;
+        this.toolResponses = toolResponses;
+        this.systemRole = systemRole;
+        this.parallelToolCalls = parallelToolCalls;
+        this.toolCallId = toolCallId;
+    }
+
+    public boolean isTools() {
+        return tools;
+    }
+
+    public boolean isToolCalls() {
+        return toolCalls;
+    }
+
+    public boolean isToolResponses() {
+        return toolResponses;
+    }
+
+    public boolean isSystemRole() {
+        return systemRole;
+    }
+
+    public boolean isParallelToolCalls() {
+        return parallelToolCalls;
+    }
+
+    public boolean isToolCallId() {
+        return toolCallId;
+    }
+}
+
+// MARK: - Main Implementation
+public class LlamaCpp {
+    private static final String TAG = "LlamaCpp";
+    private final Map<Integer, LlamaContext> contexts = new HashMap<>();
+    private int contextCounter = 0;
+    private int contextLimit = 10;
+    private boolean nativeLogEnabled = false;
+    private Context context;
+
+    // Constructor to receive context
+    public LlamaCpp(Context context) {
+        this.context = context;
+    }
+
+    // Native method declarations
+    private native long initContextNative(String modelPath, String[] searchPaths, JSObject params);
+    private native void releaseContextNative(long nativeContextId);
+    private native Map<String, Object> completionNative(long contextId, JSObject params);
+    private native Map<String, Object> modelInfoNative(String modelPath);
+    private native void stopCompletionNative(long contextId);
+    private native String getFormattedChatNative(long contextId, String messages, String chatTemplate);
+    private native boolean toggleNativeLogNative(boolean enabled);
+
+    // Model download and management methods
+    // Tokenization methods
+    private native Map<String, Object> tokenizeNative(long contextId, String text, String[] imagePaths);
+    private native String detokenizeNative(long contextId, int[] tokens);
+
+    // Model download and management methods
+    private native String downloadModelNative(String url, String filename);
+    private native Map<String, Object> getDownloadProgressNative(String url);
+    private native boolean cancelDownloadNative(String url);
+    private native List<Map<String, Object>> getAvailableModelsNative();
+
+    // Grammar utilities
+    private native String convertJsonSchemaToGrammarNative(String schemaJson);
+
+    static {
+        try {
+
+            // Detect the current architecture and load the appropriate library
+            String arch = System.getProperty("os.arch");
+            String abi = android.os.Build.SUPPORTED_ABIS[0]; // Get primary ABI
+            String libraryName;
+
+            // Map Android ABI to library name
+            switch (abi) {
+                case "arm64-v8a":
+                    libraryName = "llama-cpp-arm64";
+                    break;
+                case "armeabi-v7a":
+                    libraryName = "llama-cpp-armeabi";
+                    break;
+                case "x86":
+                    libraryName = "llama-cpp-x86";
+                    break;
+                case "x86_64":
+                    libraryName = "llama-cpp-x86_64";
+                    break;
+                default:
+                    Log.w(TAG, "Unsupported ABI: " + abi + ", falling back to arm64-v8a");
+                    libraryName = "llama-cpp-arm64";
+                    break;
+            }
+
+            Log.i(TAG, "Loading native library for ABI: " + abi + " (library: " + libraryName + ")");
+            System.loadLibrary(libraryName);
+            Log.i(TAG, "Successfully loaded llama-cpp native library: " + libraryName);
+        } catch (UnsatisfiedLinkError e) {
+            Log.e(TAG, "Failed to load llama-cpp native library: " + e.getMessage());
+            throw e;
+        }
+    }
+
+    // MARK: - Core initialization and management
+
+    public void toggleNativeLog(boolean enabled, LlamaCallback<Void> callback) {
+        try {
+            boolean result = toggleNativeLogNative(enabled);
+            nativeLogEnabled = enabled;
+            if (enabled) {
+                Log.i(TAG, "Native logging enabled");
+            } else {
+                Log.i(TAG, "Native logging disabled");
+            }
+            callback.onResult(LlamaResult.success(null));
+        } catch (Exception e) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to toggle native log: " + e.getMessage())));
+        }
+    }
+
+    public void setContextLimit(int limit, LlamaCallback<Void> callback) {
+        contextLimit = limit;
+        Log.i(TAG, "Context limit set to " + limit);
+        callback.onResult(LlamaResult.success(null));
+    }
+
+    public void downloadModel(String url, String filename, LlamaCallback<String> callback) {
+        try {
+            Log.i(TAG, "Starting download of model: " + filename + " from: " + url);
+            String localPath = downloadModelNative(url, filename);
+
+            // Start download in background thread
+            new Thread(() -> {
+                try {
+                    downloadFile(url, localPath, callback);
+                } catch (Exception e) {
+                    Log.e(TAG, "Error in download thread: " + e.getMessage());
+                    callback.onResult(LlamaResult.failure(new LlamaError("Download failed: " + e.getMessage())));
+                }
+            }).start();
+
+            // Return the local path immediately
+            callback.onResult(LlamaResult.success(localPath));
+
+        } catch (Exception e) {
+            Log.e(TAG, "Error preparing download: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Download preparation failed: " + e.getMessage())));
+        }
+    }
+
+    private void downloadFile(String url, String localPath, LlamaCallback<String> callback) {
+        try {
+            URL downloadUrl = new URL(url);
+            HttpURLConnection connection = (HttpURLConnection) downloadUrl.openConnection();
+            connection.setRequestMethod("GET");
+            connection.setConnectTimeout(30000);
+            connection.setReadTimeout(0); // No timeout for large files
+
+            int responseCode = connection.getResponseCode();
+            if (responseCode != HttpURLConnection.HTTP_OK) {
+                throw new IOException("HTTP error code: " + responseCode);
+            }
+
+            long fileSize = connection.getContentLengthLong();
+            Log.i(TAG, "File size: " + fileSize + " bytes");
+
+            try (InputStream inputStream = connection.getInputStream();
+                 FileOutputStream outputStream = new FileOutputStream(localPath)) {
+
+                byte[] buffer = new byte[8192];
+                long downloadedBytes = 0;
+                int bytesRead;
+
+                while ((bytesRead = inputStream.read(buffer)) != -1) {
+                    outputStream.write(buffer, 0, bytesRead);
+                    downloadedBytes += bytesRead;
+
+                    // Log progress every 1MB
+                    if (downloadedBytes % (1024 * 1024) == 0) {
+                        double progress = fileSize > 0 ? (double) downloadedBytes / fileSize * 100 : 0;
+                        Log.i(TAG, String.format("Download progress: %.1f%% (%d/%d bytes)",
+                            progress, downloadedBytes, fileSize));
+                    }
+                }
+            }
+
+            Log.i(TAG, "Download completed successfully: " + localPath);
+            callback.onResult(LlamaResult.success(localPath));
+
+        } catch (Exception e) {
+            Log.e(TAG, "Download failed: " + e.getMessage());
+            // Clean up partial file
+            try {
+                new File(localPath).delete();
+            } catch (Exception ignored) {}
+
+            callback.onResult(LlamaResult.failure(new LlamaError("Download failed: " + e.getMessage())));
+        }
+    }
+
+    public void getDownloadProgress(String url, LlamaCallback<Map<String, Object>> callback) {
+        try {
+            Map<String, Object> progress = getDownloadProgressNative(url);
+            if (progress != null) {
+                callback.onResult(LlamaResult.success(progress));
+            } else {
+                callback.onResult(LlamaResult.failure(new LlamaError("No download in progress for this URL")));
+            }
+        } catch (Exception e) {
+            Log.e(TAG, "Error getting download progress: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to get progress: " + e.getMessage())));
+        }
+    }
+
+    public void cancelDownload(String url, LlamaCallback<Boolean> callback) {
+        try {
+            boolean cancelled = cancelDownloadNative(url);
+            callback.onResult(LlamaResult.success(cancelled));
+        } catch (Exception e) {
+            Log.e(TAG, "Error cancelling download: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to cancel download: " + e.getMessage())));
+        }
+    }
+
+    public void getAvailableModels(LlamaCallback<List<Map<String, Object>>> callback) {
+        try {
+            List<Map<String, Object>> models = getAvailableModelsNative();
+            callback.onResult(LlamaResult.success(models));
+        } catch (Exception e) {
+            Log.e(TAG, "Error getting available models: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to get models: " + e.getMessage())));
+        }
+    }
+
+    public void convertJsonSchemaToGrammar(String schemaJson, LlamaCallback<String> callback) {
+        try {
+            String grammar = convertJsonSchemaToGrammarNative(schemaJson);
+            callback.onResult(LlamaResult.success(grammar));
+        } catch (Exception e) {
+            Log.e(TAG, "Error converting JSON schema to grammar: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to convert schema: " + e.getMessage())));
+        }
+    }
+
+    public void modelInfo(String path, String[] skip, LlamaCallback<Map<String, Object>> callback) {
+        try {
+            // Call native method to get actual model info
+            Map<String, Object> modelInfo = modelInfoNative(path);
+            if (modelInfo != null) {
+                callback.onResult(LlamaResult.success(modelInfo));
+            } else {
+                // Fallback to basic info if native method fails
+                Map<String, Object> fallbackInfo = new HashMap<>();
+                fallbackInfo.put("path", path);
+                fallbackInfo.put("desc", "Model file found but info unavailable");
+                fallbackInfo.put("size", 0);
+                fallbackInfo.put("nEmbd", 0);
+                fallbackInfo.put("nParams", 0);
+                callback.onResult(LlamaResult.success(fallbackInfo));
+            }
+        } catch (Exception e) {
+            Log.e(TAG, "Error getting model info: " + e.getMessage());
+            // Return error info
+            Map<String, Object> errorInfo = new HashMap<>();
+            errorInfo.put("path", path);
+            errorInfo.put("desc", "Error reading model: " + e.getMessage());
+            errorInfo.put("size", 0);
+            errorInfo.put("nEmbd", 0);
+            errorInfo.put("nParams", 0);
+            callback.onResult(LlamaResult.success(errorInfo));
+        }
+    }
+
+    public void initContext(int contextId, JSObject params, LlamaCallback<Map<String, Object>> callback) {
+        // Check context limit
+        if (contexts.size() >= contextLimit) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context limit reached")));
+            return;
+        }
+
+        try {
+            // Extract parameters
+            String modelPath = params.getString("model", "");
+            if (modelPath == null || modelPath.isEmpty()) {
+                callback.onResult(LlamaResult.failure(new LlamaError("Model path is required")));
+                return;
+            }
+
+            String filename = new File(modelPath).getName();
+
+            // Get dynamic search paths
+            String[] searchPaths = getModelSearchPaths(filename);
+
+            // Call native initialization
+            long nativeContextId = initContextNative(modelPath, searchPaths, params);
+            if (nativeContextId < 0) {
+                callback.onResult(LlamaResult.failure(new LlamaError("Failed to initialize native context")));
+                return;
+            }
+
+            // Create Java context wrapper
+            LlamaContext context = new LlamaContext(contextId);
+            context.setNativeContextId(nativeContextId);
+            contexts.put(contextId, context);
+
+            // Return context info
+            Map<String, Object> contextInfo = new HashMap<>();
+            contextInfo.put("contextId", contextId);
+            contextInfo.put("gpu", false);
+            contextInfo.put("reasonNoGPU", "Currently not supported");
+
+            Map<String, Object> modelInfo = new HashMap<>();
+            modelInfo.put("desc", "Loaded model");
+            modelInfo.put("size", 0);
+            modelInfo.put("nEmbd", 0);
+            modelInfo.put("nParams", 0);
+            modelInfo.put("path", modelPath);
+
+            contextInfo.put("model", modelInfo);
+            contextInfo.put("androidLib", "llama-cpp");
+
+            callback.onResult(LlamaResult.success(contextInfo));
+
+        } catch (Exception e) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context initialization failed: " + e.getMessage())));
+        }
+    }
+
+    public void releaseContext(int contextId, LlamaCallback<Void> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        try {
+            // Release native context
+            if (context.getNativeContextId() >= 0) {
+                releaseContextNative(context.getNativeContextId());
+            }
+
+            // Remove from Java context map
+            contexts.remove(contextId);
+
+            callback.onResult(LlamaResult.success(null));
+
+        } catch (Exception e) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to release context: " + e.getMessage())));
+        }
+    }
+
+    public void releaseAllContexts(LlamaCallback<Void> callback) {
+        contexts.clear();
+        callback.onResult(LlamaResult.success(null));
+    }
+
+    // MARK: - Chat and completion
+
+    public void getFormattedChat(int contextId, String messages, String chatTemplate, JSObject params, LlamaCallback<Map<String, Object>> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        try {
+            // Call native formatted chat
+            String result = getFormattedChatNative(context.getNativeContextId(), messages, chatTemplate);
+
+            // Build formatted chat result - use Lists instead of arrays
+            Map<String, Object> formattedChat = new HashMap<>();
+            formattedChat.put("type", "llama-chat");
+            formattedChat.put("prompt", result);
+            formattedChat.put("has_media", false);
+            formattedChat.put("media_paths", new ArrayList<String>());
+
+            callback.onResult(LlamaResult.success(formattedChat));
+
+        } catch (Exception e) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to format chat: " + e.getMessage())));
+        }
+    }
+
+    public void completion(int contextId, JSObject params, LlamaCallback<Map<String, Object>> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        try {
+            Log.i(TAG, "Starting completion for context: " + contextId);
+
+            // Call native completion with full params
+            Map<String, Object> result = completionNative(context.getNativeContextId(), params);
+
+            if (result != null) {
+                Log.i(TAG, "Completion completed successfully");
+                callback.onResult(LlamaResult.success(result));
+            } else {
+                Log.e(TAG, "Completion returned null result");
+                callback.onResult(LlamaResult.failure(new LlamaError("Completion failed")));
+            }
+
+        } catch (Exception e) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Completion failed: " + e.getMessage())));
+        }
+    }
+
+    public void stopCompletion(int contextId, LlamaCallback<Void> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        try {
+            stopCompletionNative(context.getNativeContextId());
+            callback.onResult(LlamaResult.success(null));
+        } catch (Exception e) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Failed to stop completion: " + e.getMessage())));
+        }
+    }
+
+    // MARK: - Session management
+
+    public void loadSession(int contextId, String filepath, LlamaCallback<Map<String, Object>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // This would typically load session from file
+        Map<String, Object> sessionResult = new HashMap<>();
+        sessionResult.put("tokens_loaded", 0);
+        sessionResult.put("prompt", "");
+
+        callback.onResult(LlamaResult.success(sessionResult));
+    }
+
+    public void saveSession(int contextId, String filepath, int size, LlamaCallback<Integer> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // This would typically save session to file
+        callback.onResult(LlamaResult.success(0));
+    }
+
+    // MARK: - Tokenization
+
+    public void tokenize(int contextId, String text, String[] imagePaths, LlamaCallback<Map<String, Object>> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        try {
+            Log.i(TAG, "Tokenizing text: " + text);
+
+            // Call native tokenization
+            Map<String, Object> result = tokenizeNative(context.getNativeContextId(), text, imagePaths);
+
+            if (result != null) {
+                Log.i(TAG, "Tokenization completed successfully");
+                callback.onResult(LlamaResult.success(result));
+            } else {
+                Log.e(TAG, "Tokenization returned null result");
+                callback.onResult(LlamaResult.failure(new LlamaError("Tokenization failed")));
+            }
+
+        } catch (Exception e) {
+            Log.e(TAG, "Tokenization failed: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Tokenization failed: " + e.getMessage())));
+        }
+    }
+
+    public void detokenize(int contextId, Integer[] tokens, LlamaCallback<String> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        try {
+            // Convert Integer[] to int[]
+            int[] tokenArray = new int[tokens.length];
+            for (int i = 0; i < tokens.length; i++) {
+                tokenArray[i] = tokens[i];
+            }
+
+            String result = detokenizeNative(context.getNativeContextId(), tokenArray);
+            callback.onResult(LlamaResult.success(result));
+
+        } catch (Exception e) {
+            Log.e(TAG, "Detokenization failed: " + e.getMessage());
+            callback.onResult(LlamaResult.failure(new LlamaError("Detokenization failed: " + e.getMessage())));
+        }
+    }
+
+    // MARK: - Embeddings and reranking
+
+    public void embedding(int contextId, String text, JSObject params, LlamaCallback<Map<String, Object>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // Fixed: Use List instead of array for proper JSON serialization
+        Map<String, Object> embeddingResult = new HashMap<>();
+        List<Double> embeddingList = new ArrayList<>();
+
+        // Generate mock embedding vector
+        for (int i = 0; i < 384; i++) {
+            embeddingList.add(Math.random() - 0.5);
+        }
+
+        embeddingResult.put("embedding", embeddingList);
+
+        callback.onResult(LlamaResult.success(embeddingResult));
+    }
+
+    public void rerank(int contextId, String query, String[] documents, JSObject params, LlamaCallback<List<Map<String, Object>>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // Fixed: Use List instead of array for proper JSON serialization
+        List<Map<String, Object>> rerankResults = new ArrayList<>();
+
+        // Generate mock rerank results
+        for (int i = 0; i < documents.length; i++) {
+            Map<String, Object> result = new HashMap<>();
+            result.put("score", Math.random());
+            result.put("index", i);
+            rerankResults.add(result);
+        }
+
+        callback.onResult(LlamaResult.success(rerankResults));
+    }
+
+    // MARK: - Benchmarking
+
+    public void bench(int contextId, int pp, int tg, int pl, int nr, LlamaCallback<String> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // This would typically run benchmarks
+        String benchResult = "[]";
+        callback.onResult(LlamaResult.success(benchResult));
+    }
+
+    // MARK: - LoRA adapters
+
+    public void applyLoraAdapters(int contextId, List<Map<String, Object>> loraAdapters, LlamaCallback<Void> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // This would typically apply LoRA adapters
+        callback.onResult(LlamaResult.success(null));
+    }
+
+    public void removeLoraAdapters(int contextId, LlamaCallback<Void> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // This would typically remove LoRA adapters
+        callback.onResult(LlamaResult.success(null));
+    }
+
+    public void getLoadedLoraAdapters(int contextId, LlamaCallback<List<Map<String, Object>>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // Fixed: Use List instead of array for proper JSON serialization
+        List<Map<String, Object>> adapters = new ArrayList<>();
+        callback.onResult(LlamaResult.success(adapters));
+    }
+
+    // MARK: - Multimodal methods
+
+    public void initMultimodal(int contextId, String path, boolean useGpu, LlamaCallback<Boolean> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        context.setMultimodalEnabled(true);
+        callback.onResult(LlamaResult.success(true));
+    }
+
+    public void isMultimodalEnabled(int contextId, LlamaCallback<Boolean> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        callback.onResult(LlamaResult.success(context.isMultimodalEnabled()));
+    }
+
+    public void getMultimodalSupport(int contextId, LlamaCallback<Map<String, Object>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        Map<String, Object> support = new HashMap<>();
+        support.put("vision", true);
+        support.put("audio", true);
+
+        callback.onResult(LlamaResult.success(support));
+    }
+
+    public void releaseMultimodal(int contextId, LlamaCallback<Void> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        context.setMultimodalEnabled(false);
+        callback.onResult(LlamaResult.success(null));
+    }
+
+    // MARK: - TTS methods
+
+    public void initVocoder(int contextId, String path, Integer nBatch, LlamaCallback<Boolean> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        context.setVocoderEnabled(true);
+        callback.onResult(LlamaResult.success(true));
+    }
+
+    public void isVocoderEnabled(int contextId, LlamaCallback<Boolean> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        callback.onResult(LlamaResult.success(context.isVocoderEnabled()));
+    }
+
+    public void getFormattedAudioCompletion(int contextId, String speakerJsonStr, String textToSpeak, LlamaCallback<Map<String, Object>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        Map<String, Object> audioCompletion = new HashMap<>();
+        audioCompletion.put("prompt", "");
+        audioCompletion.put("grammar", null);
+
+        callback.onResult(LlamaResult.success(audioCompletion));
+    }
+
+    public void getAudioCompletionGuideTokens(int contextId, String textToSpeak, LlamaCallback<List<Integer>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // Fixed: Use List instead of array for proper JSON serialization
+        List<Integer> tokens = new ArrayList<>();
+        callback.onResult(LlamaResult.success(tokens));
+    }
+
+    public void decodeAudioTokens(int contextId, Integer[] tokens, LlamaCallback<List<Integer>> callback) {
+        if (contexts.get(contextId) == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        // Fixed: Use List instead of array for proper JSON serialization
+        List<Integer> decodedTokens = new ArrayList<>();
+        callback.onResult(LlamaResult.success(decodedTokens));
+    }
+
+    public void releaseVocoder(int contextId, LlamaCallback<Void> callback) {
+        LlamaContext context = contexts.get(contextId);
+        if (context == null) {
+            callback.onResult(LlamaResult.failure(new LlamaError("Context not found")));
+            return;
+        }
+
+        context.setVocoderEnabled(false);
+        callback.onResult(LlamaResult.success(null));
+    }
+
+    // MARK: - Callback Interface
+    public interface LlamaCallback<T> {
+        void onResult(LlamaResult<T> result);
+    }
+
+    // Add this method to get proper storage paths
+    private String[] getModelSearchPaths(String filename) {
+        String packageName = context.getPackageName();
+
+        List<String> paths = new ArrayList<>();
+
+        // Internal storage (always available, no permissions needed)
+        File internalFilesDir = context.getFilesDir();
+        paths.add(internalFilesDir.getAbsolutePath() + "/" + filename);
+        paths.add(internalFilesDir.getAbsolutePath() + "/Documents/" + filename);
+
+        // External files directory (app-specific, no permissions needed on Android 10+)
+        File externalFilesDir = context.getExternalFilesDir(null);
+        if (externalFilesDir != null) {
+            paths.add(externalFilesDir.getAbsolutePath() + "/" + filename);
+            paths.add(externalFilesDir.getAbsolutePath() + "/Documents/" + filename);
+        }
+
+        // External storage (requires permissions, may not be available)
+        if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
+            File externalStorage = Environment.getExternalStorageDirectory();
+            paths.add(externalStorage.getAbsolutePath() + "/Documents/" + filename);
+            paths.add(externalStorage.getAbsolutePath() + "/Download/" + filename);
+            paths.add(externalStorage.getAbsolutePath() + "/Downloads/" + filename);
+            paths.add(externalStorage.getAbsolutePath() + "/Downloads/models/" + filename);
+        }
+
+        return paths.toArray(new String[0]);
+    }
+}
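
For orientation, here is a minimal sketch of how the callback-based API in the new `LlamaCpp.java` can be driven from the plugin layer. The caller class, model path, and context id below are illustrative assumptions and are not part of the package; only the `LlamaCpp` constructor, `initContext`, the `LlamaResult` accessors, and the `JSObject` usage come from the diff above.

```java
package ai.annadata.plugin.capacitor;

import android.content.Context;
import android.util.Log;

import com.getcapacitor.JSObject;

// Hypothetical caller (not part of the published package) showing the
// LlamaResult/LlamaCallback pattern used throughout the new LlamaCpp.java.
class LlamaCppUsageSketch {
    void loadModel(Context appContext) {
        LlamaCpp llama = new LlamaCpp(appContext);

        JSObject params = new JSObject();
        // Assumed example path; initContext also probes the getModelSearchPaths() locations.
        params.put("model", appContext.getFilesDir().getAbsolutePath() + "/model.gguf");

        llama.initContext(1, params, result -> {
            if (result.isSuccess()) {
                Log.i("LlamaCppUsageSketch", "Context info: " + result.getData());
            } else {
                Log.e("LlamaCppUsageSketch", "Init failed: " + result.getError().getMessage());
            }
        });
    }
}
```

Because `LlamaResult`, `LlamaError`, and the nested `LlamaCallback` interface are package-private, a real caller such as `LlamaCppPlugin.java` sits in the same `ai.annadata.plugin.capacitor` package.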