node-llama-cpp 3.0.0-beta.10 → 3.0.0-beta.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bindings/AddonTypes.d.ts +3 -0
- package/dist/bindings/getLlama.d.ts +17 -0
- package/dist/bindings/getLlama.js +4 -1
- package/dist/bindings/getLlama.js.map +1 -1
- package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.d.ts +26 -0
- package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js +43 -0
- package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js.map +1 -0
- package/dist/cli/cli.js +4 -0
- package/dist/cli/cli.js.map +1 -1
- package/dist/cli/commands/ChatCommand.d.ts +2 -2
- package/dist/cli/commands/ChatCommand.js +3 -39
- package/dist/cli/commands/ChatCommand.js.map +1 -1
- package/dist/cli/commands/CompleteCommand.d.ts +25 -0
- package/dist/cli/commands/CompleteCommand.js +278 -0
- package/dist/cli/commands/CompleteCommand.js.map +1 -0
- package/dist/cli/commands/InfillCommand.d.ts +27 -0
- package/dist/cli/commands/InfillCommand.js +316 -0
- package/dist/cli/commands/InfillCommand.js.map +1 -0
- package/dist/consts.d.ts +1 -0
- package/dist/consts.js +2 -0
- package/dist/consts.js.map +1 -0
- package/dist/evaluator/LlamaChat/LlamaChat.d.ts +2 -33
- package/dist/evaluator/LlamaChat/LlamaChat.js +7 -28
- package/dist/evaluator/LlamaChat/LlamaChat.js.map +1 -1
- package/dist/evaluator/LlamaChatSession/LlamaChatSession.js +1 -1
- package/dist/evaluator/LlamaChatSession/LlamaChatSession.js.map +1 -1
- package/dist/evaluator/LlamaCompletion.d.ts +148 -0
- package/dist/evaluator/LlamaCompletion.js +402 -0
- package/dist/evaluator/LlamaCompletion.js.map +1 -0
- package/dist/evaluator/LlamaContext/LlamaContext.js +6 -2
- package/dist/evaluator/LlamaContext/LlamaContext.js.map +1 -1
- package/dist/evaluator/LlamaModel.d.ts +10 -1
- package/dist/evaluator/LlamaModel.js +33 -3
- package/dist/evaluator/LlamaModel.js.map +1 -1
- package/dist/index.d.ts +6 -4
- package/dist/index.js +4 -2
- package/dist/index.js.map +1 -1
- package/dist/types.d.ts +31 -0
- package/dist/utils/UnsupportedError.d.ts +2 -0
- package/dist/utils/UnsupportedError.js +7 -0
- package/dist/utils/UnsupportedError.js.map +1 -0
- package/dist/utils/getQueuedTokensBeforeStopTrigger.d.ts +6 -0
- package/dist/utils/getQueuedTokensBeforeStopTrigger.js +22 -0
- package/dist/utils/getQueuedTokensBeforeStopTrigger.js.map +1 -0
- package/llama/addon.cpp +63 -9
- package/llama/binariesGithubRelease.json +1 -1
- package/llama/gitRelease.bundle +0 -0
- package/llama/llama.cpp.info.json +1 -1
- package/llamaBins/linux-arm64/.buildMetadata.json +1 -1
- package/llamaBins/linux-arm64/llama-addon.node +0 -0
- package/llamaBins/linux-armv7l/.buildMetadata.json +1 -1
- package/llamaBins/linux-armv7l/llama-addon.node +0 -0
- package/llamaBins/linux-x64/.buildMetadata.json +1 -1
- package/llamaBins/linux-x64/llama-addon.node +0 -0
- package/llamaBins/linux-x64-cuda/.buildMetadata.json +1 -1
- package/llamaBins/linux-x64-cuda/llama-addon.node +0 -0
- package/llamaBins/mac-arm64-metal/.buildMetadata.json +1 -1
- package/llamaBins/mac-arm64-metal/ggml-metal.metal +378 -6
- package/llamaBins/mac-arm64-metal/llama-addon.node +0 -0
- package/llamaBins/mac-x64/.buildMetadata.json +1 -1
- package/llamaBins/mac-x64/llama-addon.node +0 -0
- package/llamaBins/win-x64/.buildMetadata.json +1 -1
- package/llamaBins/win-x64/llama-addon.node +0 -0
- package/llamaBins/win-x64-cuda/.buildMetadata.json +1 -1
- package/llamaBins/win-x64-cuda/llama-addon.node +0 -0
- package/package.json +2 -2
- package/dist/AbortError.d.ts +0 -2
- package/dist/AbortError.js +0 -7
- package/dist/AbortError.js.map +0 -1
|
@@ -10,6 +10,7 @@ export class LlamaModel {
|
|
|
10
10
|
/** @internal */ _disposedState = { disposed: false };
|
|
11
11
|
/** @internal */ _typeDescription;
|
|
12
12
|
/** @internal */ _trainContextSize;
|
|
13
|
+
/** @internal */ _embeddingVectorSize;
|
|
13
14
|
onDispose = new EventRelay();
|
|
14
15
|
/**
|
|
15
16
|
* > options source:
|
|
@@ -92,6 +93,13 @@ export class LlamaModel {
|
|
|
92
93
|
this._trainContextSize = this._model.getTrainContextSize();
|
|
93
94
|
return this._trainContextSize;
|
|
94
95
|
}
|
|
96
|
+
/** The size of an embedding vector the model can produce */
|
|
97
|
+
get embeddingVectorSize() {
|
|
98
|
+
this._ensureNotDisposed();
|
|
99
|
+
if (this._embeddingVectorSize == null)
|
|
100
|
+
this._embeddingVectorSize = this._model.getEmbeddingVectorSize();
|
|
101
|
+
return this._embeddingVectorSize;
|
|
102
|
+
}
|
|
95
103
|
/** @internal */
|
|
96
104
|
_ensureNotDisposed() {
|
|
97
105
|
if (this._disposedState.disposed)
|
|
@@ -108,6 +116,7 @@ export class LlamaModelTokens {
|
|
|
108
116
|
/** @internal */ _bosString;
|
|
109
117
|
/** @internal */ _eosString;
|
|
110
118
|
/** @internal */ _nlString;
|
|
119
|
+
/** @internal */ _shouldPrependBosToken;
|
|
111
120
|
constructor(model, disposedState) {
|
|
112
121
|
this._model = model;
|
|
113
122
|
this._disposedState = disposedState;
|
|
@@ -190,6 +199,15 @@ export class LlamaModelTokens {
|
|
|
190
199
|
this._nlString = this._model.getTokenString(nlToken);
|
|
191
200
|
return this._nlString;
|
|
192
201
|
}
|
|
202
|
+
/**
|
|
203
|
+
* @returns Whether we should prepend a BOS (Beginning Of Sequence) token for evaluations with this model.
|
|
204
|
+
*/
|
|
205
|
+
get shouldPrependBosToken() {
|
|
206
|
+
this._ensureNotDisposed();
|
|
207
|
+
if (this._shouldPrependBosToken == null)
|
|
208
|
+
this._shouldPrependBosToken = this.bos != null && this._model.shouldPrependBosToken();
|
|
209
|
+
return this._shouldPrependBosToken;
|
|
210
|
+
}
|
|
193
211
|
/** @internal */
|
|
194
212
|
_ensureNotDisposed() {
|
|
195
213
|
if (this._disposedState.disposed)
|
|
@@ -221,7 +239,7 @@ export class LlamaModelInfillTokens {
|
|
|
221
239
|
get prefix() {
|
|
222
240
|
this._ensureNotDisposed();
|
|
223
241
|
if (this._prefixToken == null)
|
|
224
|
-
this._prefixToken = this._model.prefixToken();
|
|
242
|
+
this._prefixToken = this._resolveSpecialToken(this._model.prefixToken(), ["<fim_prefix>"]);
|
|
225
243
|
if (this._prefixToken === -1)
|
|
226
244
|
return null;
|
|
227
245
|
return this._prefixToken;
|
|
@@ -232,7 +250,7 @@ export class LlamaModelInfillTokens {
|
|
|
232
250
|
get middle() {
|
|
233
251
|
this._ensureNotDisposed();
|
|
234
252
|
if (this._middleToken == null)
|
|
235
|
-
this._middleToken = this._model.middleToken();
|
|
253
|
+
this._middleToken = this._resolveSpecialToken(this._model.middleToken(), ["<fim_middle>"]);
|
|
236
254
|
if (this._middleToken === -1)
|
|
237
255
|
return null;
|
|
238
256
|
return this._middleToken;
|
|
@@ -243,7 +261,7 @@ export class LlamaModelInfillTokens {
|
|
|
243
261
|
get suffix() {
|
|
244
262
|
this._ensureNotDisposed();
|
|
245
263
|
if (this._suffixToken == null)
|
|
246
|
-
this._suffixToken = this._model.suffixToken();
|
|
264
|
+
this._suffixToken = this._resolveSpecialToken(this._model.suffixToken(), ["<fim_suffix>"]);
|
|
247
265
|
if (this._suffixToken === -1)
|
|
248
266
|
return null;
|
|
249
267
|
return this._suffixToken;
|
|
@@ -313,6 +331,18 @@ export class LlamaModelInfillTokens {
|
|
|
313
331
|
throw new DisposedError();
|
|
314
332
|
}
|
|
315
333
|
/** @internal */
|
|
334
|
+
_resolveSpecialToken(token, fallbackTexts) {
|
|
335
|
+
if (token != null && token !== -1)
|
|
336
|
+
return token;
|
|
337
|
+
for (const text of fallbackTexts) {
|
|
338
|
+
const tokens = this._model.tokenize(text, true);
|
|
339
|
+
if (tokens.length !== 1)
|
|
340
|
+
continue;
|
|
341
|
+
return tokens[0];
|
|
342
|
+
}
|
|
343
|
+
return -1;
|
|
344
|
+
}
|
|
345
|
+
/** @internal */
|
|
316
346
|
static _create(model, disposedState) {
|
|
317
347
|
return new LlamaModelInfillTokens(model, disposedState);
|
|
318
348
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"LlamaModel.js","sourceRoot":"","sources":["../../src/evaluator/LlamaModel.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAC,aAAa,EAAE,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAC1D,OAAO,EAAC,gBAAgB,EAAC,MAAM,8BAA8B,CAAC;
|
|
1
|
+
{"version":3,"file":"LlamaModel.js","sourceRoot":"","sources":["../../src/evaluator/LlamaModel.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAC,aAAa,EAAE,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAC1D,OAAO,EAAC,gBAAgB,EAAC,MAAM,8BAA8B,CAAC;AA6B9D,MAAM,OAAO,UAAU;IACnB,gBAAgB,CAAiB,MAAM,CAAQ;IAC/C,gBAAgB,CAAiB,MAAM,CAAa;IACpD,gBAAgB,CAAkB,OAAO,CAAmB;IAC5D,gBAAgB,CAAkB,SAAS,CAAU;IACrD,gBAAgB,CAAkB,cAAc,GAAkB,EAAC,QAAQ,EAAE,KAAK,EAAC,CAAC;IACpF,gBAAgB,CAAS,gBAAgB,CAAwB;IACjE,gBAAgB,CAAS,iBAAiB,CAAU;IACpD,gBAAgB,CAAS,oBAAoB,CAAU;IAEvC,SAAS,GAAG,IAAI,UAAU,EAAQ,CAAC;IAEnD;;;;;;;;;;OAUG;IACH,YAAmB,EACf,KAAK,EAAE,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,OAAO,EAAE,QAAQ,EACzC;QAChB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;QACpB,IAAI,CAAC,MAAM,GAAG,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,SAAS,CAAC,EAAE,gBAAgB,CAAC;YACxG,SAAS;YACT,SAAS;YACT,OAAO;YACP,QAAQ;SACX,CAAC,CAAC,CAAC;QACJ,IAAI,CAAC,OAAO,GAAG,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAC1E,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;QAE1C,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACzC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,OAAO;QACV,IAAI,IAAI,CAAC,cAAc,CAAC,QAAQ;YAC5B,OAAO;QAEX,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC;QAC/B,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;QACtB,IAAI,CAAC,cAAc,CAAC,QAAQ,GAAG,IAAI,CAAC;IACxC,CAAC;IAED,cAAc;IACP,CAAC,MAAM,CAAC,OAAO,CAAC;QACnB,IAAI,CAAC,OAAO,EAAE,CAAC;IACnB,CAAC;IAED,IAAW,QAAQ;QACf,OAAO,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED,IAAW,MAAM;QACb,OAAO,IAAI,CAAC,OAAO,CAAC;IACxB,CAAC;IAED,IAAW,QAAQ;QACf,OAAO,IAAI,CAAC,SAAS,CAAC;IAC1B,CAAC;IAWM,QAAQ,CAAC,IAAY,EAAE,gBAAqC,KAAK;QACpE,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,KAAK,EAAE;YACX,OAAO,EAAE,CAAC;QAEd,IAAI,aAAa,KAAK,SAAS,EAAE;YAC7B,MAAM,YAAY,GAAG,IAAgC,CAAC;YAEtD,QAAQ,YAAY,EAAE;gBAClB,KAAK,KAAK,CAAC,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC
,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;gBACpE,KAAK,KAAK,CAAC,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;gBACpE,KAAK,IAAI,CAAC,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;aACpE;YAED,KAAK,CAAC,YAA4B,CAAC,CAAC;YACpC,MAAM,IAAI,KAAK,CAAC,kCAAkC,YAAY,EAAE,CAAC,CAAC;SACrE;QAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,EAAE,aAAa,CAAC,CAAY,CAAC;IAC5E,CAAC;IAED,iCAAiC;IAC1B,UAAU,CAAC,MAAwB;QACtC,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;YACnB,OAAO,EAAE,CAAC;QAEd,OAAO,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;IAC5D,CAAC;IAED,iFAAiF;IACjF,IAAW,eAAe;QACtB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI;YAC7B,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,EAAE,CAAC;QAE9D,OAAO,IAAI,CAAC,gBAAgB,CAAC;IACjC,CAAC;IAED,gDAAgD;IAChD,IAAW,gBAAgB;QACvB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI;YAC9B,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,EAAE,CAAC;QAE/D,OAAO,IAAI,CAAC,iBAAiB,CAAC;IAClC,CAAC;IAED,4DAA4D;IAC5D,IAAW,mBAAmB;QAC1B,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI;YACjC,IAAI,CAAC,oBAAoB,GAAG,IAAI,CAAC,MAAM,CAAC,sBAAsB,EAAE,CAAC;QAErE,OAAO,IAAI,CAAC,oBAAoB,CAAC;IACrC,CAAC;IAED,gBAAgB;IACR,kBAAkB;QACtB,IAAI,IAAI,CAAC,cAAc,CAAC,QAAQ;YAC5B,MAAM,IAAI,aAAa,EAAE,CAAC;IAClC,CAAC;CACJ;AAED,MAAM,OAAO,gBAAgB;IACzB,gBAAgB,CAAkB,MAAM,CAAa;IACrD,gBAAgB,CAAkB,cAAc,CAAgB;IAChE,gBAAgB,CAAS,aAAa,CAA0B;IAChE,gBAAgB,CAAS,SAAS,CAAS;IAC3C,gBAAgB,CAAS,SAAS,CAAS;IAC3C,gBAAgB,CAAS,QAAQ,CAAS;IAC1C,gBAAgB,CAAS,UAAU,CAAU;IAC7C,gBAAgB,CAAS,UAAU,CAAU;IAC7C,gBAAgB,CAAS,SAAS,CAAU;IAC5C,gBAAgB,CAAS,sBAAsB,CAAW;IAE1D,YAAoB,KAAiB,EAAE,aAA4B;QAC/D,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;QACpB,IAAI,CAAC,cAAc,GAAG,aAAa,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,IAAW,MAAM;QACb,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI;YAC1B,IAA
I,CAAC,aAAa,GAAG,sBAAsB,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE1F,OAAO,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IAED;;OAEG;IACH,IAAW,GAAG;QACV,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;QAE5C,IAAI,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;YACrB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,SAAS,CAAC;IAC1B,CAAC;IAED;;OAEG;IACH,IAAW,GAAG;QACV,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;QAE5C,IAAI,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;YACrB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,SAAS,CAAC;IAC1B,CAAC;IAED;;OAEG;IACH,IAAW,EAAE;QACT,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI;YACrB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;QAE1C,IAAI,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;YACpB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,QAAQ,CAAC;IACzB,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAChB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC;QAE1B,IAAI,QAAQ,IAAI,IAAI;YAChB,OAAO,IAAI,CAAC;QAEhB,IAAI,IAAI,CAAC,UAAU,IAAI,IAAI;YACvB,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;QAE3D,OAAO,IAAI,CAAC,UAAU,CAAC;IAC3B,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAChB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC;QAE1B,IAAI,QAAQ,IAAI,IAAI;YAChB,OAAO,IAAI,CAAC;QAEhB,IAAI,IAAI,CAAC,UAAU,IAAI,IAAI;YACvB,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;QAE3D,OAAO,IAAI,CAAC,UAAU,CAAC;IAC3B,CAAC;IAED;;OAEG;IACH,IAAW,QAAQ;QACf,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,OAAO,GAAG,IAAI,CAAC,EAAE,CAAC;QAExB,IAAI,OAAO,IAAI,IAAI;YACf,OAAO,IAAI,CAAC;QAEhB,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QAEzD,OAAO,IAAI,CAAC,SAAS,CAAC;IAC1B,CAAC;IAED;;OAEG;IACH,IAAW,qBAAqB;QAC5B,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,sBAAsB,IAAI,IAAI;YACnC,IAAI,CAAC,sBAAsB,GAAG,IAAI,CAAC,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,qBAAqB,EAAE,CAAC;QAE1F,OAAO,IAAI,C
AAC,sBAAsB,CAAC;IACvC,CAAC;IAED,gBAAgB;IACR,kBAAkB;QACtB,IAAI,IAAI,CAAC,cAAc,CAAC,QAAQ;YAC5B,MAAM,IAAI,aAAa,EAAE,CAAC;IAClC,CAAC;IAED,gBAAgB;IACT,MAAM,CAAC,OAAO,CAAC,KAAiB,EAAE,aAA4B;QACjE,OAAO,IAAI,gBAAgB,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC;IACtD,CAAC;CACJ;AAED,MAAM,OAAO,sBAAsB;IAC/B,gBAAgB,CAAkB,MAAM,CAAa;IACrD,gBAAgB,CAAkB,cAAc,CAAgB;IAChE,gBAAgB,CAAS,YAAY,CAAS;IAC9C,gBAAgB,CAAS,YAAY,CAAS;IAC9C,gBAAgB,CAAS,YAAY,CAAS;IAC9C,gBAAgB,CAAS,SAAS,CAAS;IAC3C,gBAAgB,CAAS,aAAa,CAAU;IAChD,gBAAgB,CAAS,aAAa,CAAU;IAChD,gBAAgB,CAAS,aAAa,CAAU;IAChD,gBAAgB,CAAS,UAAU,CAAU;IAE7C,YAAoB,KAAiB,EAAE,aAA4B;QAC/D,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;QACpB,IAAI,CAAC,cAAc,GAAG,aAAa,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,IAAW,MAAM;QACb,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI;YACzB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QAE/F,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,CAAC;YACxB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,IAAW,MAAM;QACb,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI;YACzB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QAE/F,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,CAAC;YACxB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,IAAW,MAAM;QACb,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI;YACzB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QAE/F,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,CAAC;YACxB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC7B,CAAC;IAED;;OAEG;IACH,IAAW,GAAG;QACV,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI;YACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;QAE5C,IAAI,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;YACrB,OAAO,IAAI,CAAC;QAEhB,OAAO,IAAI,CAAC,SAAS,CAAC;IAC1B,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACnB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC;QAEhC,IAAI,WAAW,IAAI,IAAI;YACnB,OAAO,IAAI,CAAC;QAEhB,I
AAI,IAAI,CAAC,aAAa,IAAI,IAAI;YAC1B,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;QAEjE,OAAO,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACnB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC;QAEhC,IAAI,WAAW,IAAI,IAAI;YACnB,OAAO,IAAI,CAAC;QAEhB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI;YAC1B,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;QAEjE,OAAO,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACnB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC;QAEhC,IAAI,WAAW,IAAI,IAAI;YACnB,OAAO,IAAI,CAAC;QAEhB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI;YAC1B,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;QAEjE,OAAO,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAChB,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAE1B,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC;QAE1B,IAAI,QAAQ,IAAI,IAAI;YAChB,OAAO,IAAI,CAAC;QAEhB,IAAI,IAAI,CAAC,UAAU,IAAI,IAAI;YACvB,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;QAE3D,OAAO,IAAI,CAAC,UAAU,CAAC;IAC3B,CAAC;IAED,gBAAgB;IACR,kBAAkB;QACtB,IAAI,IAAI,CAAC,cAAc,CAAC,QAAQ;YAC5B,MAAM,IAAI,aAAa,EAAE,CAAC;IAClC,CAAC;IAED,gBAAgB;IACR,oBAAoB,CAAC,KAAY,EAAE,aAAuB;QAC9D,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,CAAC,CAAC;YAC7B,OAAO,KAAK,CAAC;QAEjB,KAAK,MAAM,IAAI,IAAI,aAAa,EAAE;YAC9B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAChD,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;gBACnB,SAAS;YAEb,OAAO,MAAM,CAAC,CAAC,CAAU,CAAC;SAC7B;QAED,OAAO,CAAC,CAAU,CAAC;IACvB,CAAC;IAED,gBAAgB;IACT,MAAM,CAAC,OAAO,CAAC,KAAiB,EAAE,aAA4B;QACjE,OAAO,IAAI,sBAAsB,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC;IAC5D,CAAC;CACJ"}
|
package/dist/index.d.ts
CHANGED
|
@@ -13,8 +13,9 @@ import { LlamaEmbeddingContext, type LlamaEmbeddingContextOptions, LlamaEmbeddin
|
|
|
13
13
|
import { type LlamaContextOptions, type BatchingOptions, type LlamaContextSequenceRepeatPenalty, type CustomBatchingDispatchSchedule, type CustomBatchingPrioritizeStrategy, type BatchItem, type PrioritizedBatchItem, type ContextShiftOptions, type ContextTokensDeleteRange, type EvaluationPriority } from "./evaluator/LlamaContext/types.js";
|
|
14
14
|
import { LlamaChatSession, type LlamaChatSessionOptions, type LlamaChatSessionContextShiftOptions, type LLamaChatPromptOptions, type LlamaChatSessionRepeatPenalty } from "./evaluator/LlamaChatSession/LlamaChatSession.js";
|
|
15
15
|
import { defineChatSessionFunction } from "./evaluator/LlamaChatSession/utils/defineChatSessionFunction.js";
|
|
16
|
-
import { LlamaChat, type LlamaChatOptions, type LLamaChatGenerateResponseOptions, type LLamaChatContextShiftOptions, type
|
|
17
|
-
import {
|
|
16
|
+
import { LlamaChat, type LlamaChatOptions, type LLamaChatGenerateResponseOptions, type LLamaChatContextShiftOptions, type LlamaChatResponse, type LlamaChatResponseFunctionCall } from "./evaluator/LlamaChat/LlamaChat.js";
|
|
17
|
+
import { LlamaCompletion, type LlamaCompletionOptions, type LlamaCompletionGenerationOptions, type LlamaInfillGenerationOptions } from "./evaluator/LlamaCompletion.js";
|
|
18
|
+
import { UnsupportedError } from "./utils/UnsupportedError.js";
|
|
18
19
|
import { ChatWrapper, type ChatWrapperSettings } from "./ChatWrapper.js";
|
|
19
20
|
import { EmptyChatWrapper } from "./chatWrappers/EmptyChatWrapper.js";
|
|
20
21
|
import { LlamaChatWrapper } from "./chatWrappers/LlamaChatWrapper.js";
|
|
@@ -24,9 +25,10 @@ import { FalconChatWrapper } from "./chatWrappers/FalconChatWrapper.js";
|
|
|
24
25
|
import { AlpacaChatWrapper } from "./chatWrappers/AlpacaChatWrapper.js";
|
|
25
26
|
import { FunctionaryChatWrapper } from "./chatWrappers/FunctionaryChatWrapper.js";
|
|
26
27
|
import { resolveChatWrapperBasedOnModel } from "./chatWrappers/resolveChatWrapperBasedOnModel.js";
|
|
28
|
+
import { resolveChatWrapperBasedOnWrapperTypeName, chatWrapperTypeNames, type ChatWrapperTypeName } from "./bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js";
|
|
27
29
|
import { LlamaText, SpecialToken, BuiltinSpecialToken, isLlamaText, tokenizeText, type LlamaTextJSON, type LlamaTextJSONValue, type LlamaTextSpecialTokenJSON } from "./utils/LlamaText.js";
|
|
28
30
|
import { appendUserMessageToChatHistory } from "./utils/appendUserMessageToChatHistory.js";
|
|
29
31
|
import { getModuleVersion } from "./utils/getModuleVersion.js";
|
|
30
|
-
import { type ChatHistoryItem, type ChatModelFunctionCall, type ChatModelFunctions, type ChatModelResponse, type ChatSessionModelFunction, type ChatSessionModelFunctions, type ChatSystemMessage, type ChatUserMessage, type Token, isChatModelResponseFunctionCall } from "./types.js";
|
|
32
|
+
import { type ChatHistoryItem, type ChatModelFunctionCall, type ChatModelFunctions, type ChatModelResponse, type ChatSessionModelFunction, type ChatSessionModelFunctions, type ChatSystemMessage, type ChatUserMessage, type Token, isChatModelResponseFunctionCall, type LLamaContextualRepeatPenalty } from "./types.js";
|
|
31
33
|
import { type GbnfJsonArraySchema, type GbnfJsonBasicSchema, type GbnfJsonConstSchema, type GbnfJsonEnumSchema, type GbnfJsonObjectSchema, type GbnfJsonOneOfSchema, type GbnfJsonSchema, type GbnfJsonSchemaImmutableType, type GbnfJsonSchemaToType } from "./utils/gbnfJson/types.js";
|
|
32
|
-
export { Llama, getLlama, type LlamaOptions, LlamaLogLevel, NoBinaryFoundError, LlamaModel, LlamaModelTokens, LlamaModelInfillTokens, type LlamaModelOptions, LlamaGrammar, type LlamaGrammarOptions, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, type LlamaGrammarEvaluationStateOptions, LlamaContext, LlamaContextSequence, type LlamaContextOptions, type BatchingOptions, type CustomBatchingDispatchSchedule, type CustomBatchingPrioritizeStrategy, type BatchItem, type PrioritizedBatchItem, type ContextShiftOptions, type ContextTokensDeleteRange, type EvaluationPriority, type LlamaContextSequenceRepeatPenalty, LlamaEmbeddingContext, type LlamaEmbeddingContextOptions, LlamaEmbedding, type LlamaEmbeddingJSON, LlamaChatSession, defineChatSessionFunction, type LlamaChatSessionOptions, type LlamaChatSessionContextShiftOptions, type LLamaChatPromptOptions, type LlamaChatSessionRepeatPenalty, LlamaChat, type LlamaChatOptions, type LLamaChatGenerateResponseOptions, type LLamaChatContextShiftOptions, type
|
|
34
|
+
export { Llama, getLlama, type LlamaOptions, LlamaLogLevel, NoBinaryFoundError, LlamaModel, LlamaModelTokens, LlamaModelInfillTokens, type LlamaModelOptions, LlamaGrammar, type LlamaGrammarOptions, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, type LlamaGrammarEvaluationStateOptions, LlamaContext, LlamaContextSequence, type LlamaContextOptions, type BatchingOptions, type CustomBatchingDispatchSchedule, type CustomBatchingPrioritizeStrategy, type BatchItem, type PrioritizedBatchItem, type ContextShiftOptions, type ContextTokensDeleteRange, type EvaluationPriority, type LlamaContextSequenceRepeatPenalty, LlamaEmbeddingContext, type LlamaEmbeddingContextOptions, LlamaEmbedding, type LlamaEmbeddingJSON, LlamaChatSession, defineChatSessionFunction, type LlamaChatSessionOptions, type LlamaChatSessionContextShiftOptions, type LLamaChatPromptOptions, type LlamaChatSessionRepeatPenalty, LlamaChat, type LlamaChatOptions, type LLamaChatGenerateResponseOptions, type LLamaChatContextShiftOptions, type LLamaContextualRepeatPenalty, type LlamaChatResponse, type LlamaChatResponseFunctionCall, LlamaCompletion, type LlamaCompletionOptions, type LlamaCompletionGenerationOptions, type LlamaInfillGenerationOptions, UnsupportedError, DisposedError, ChatWrapper, type ChatWrapperSettings, EmptyChatWrapper, LlamaChatWrapper, GeneralChatWrapper, ChatMLChatWrapper, FalconChatWrapper, AlpacaChatWrapper, FunctionaryChatWrapper, resolveChatWrapperBasedOnModel, resolveChatWrapperBasedOnWrapperTypeName, chatWrapperTypeNames, type ChatWrapperTypeName, LlamaText, SpecialToken, BuiltinSpecialToken, isLlamaText, tokenizeText, type LlamaTextJSON, type LlamaTextJSONValue, type LlamaTextSpecialTokenJSON, appendUserMessageToChatHistory, getModuleVersion, type ChatHistoryItem, type ChatModelFunctionCall, type ChatModelFunctions, type ChatModelResponse, type ChatSessionModelFunction, type ChatSessionModelFunctions, type ChatSystemMessage, type ChatUserMessage, type 
Token, isChatModelResponseFunctionCall, type GbnfJsonSchema, type GbnfJsonSchemaToType, type GbnfJsonSchemaImmutableType, type GbnfJsonBasicSchema, type GbnfJsonConstSchema, type GbnfJsonEnumSchema, type GbnfJsonOneOfSchema, type GbnfJsonObjectSchema, type GbnfJsonArraySchema };
|
package/dist/index.js
CHANGED
|
@@ -13,7 +13,8 @@ import { LlamaEmbeddingContext, LlamaEmbedding } from "./evaluator/LlamaEmbeddin
|
|
|
13
13
|
import { LlamaChatSession } from "./evaluator/LlamaChatSession/LlamaChatSession.js";
|
|
14
14
|
import { defineChatSessionFunction } from "./evaluator/LlamaChatSession/utils/defineChatSessionFunction.js";
|
|
15
15
|
import { LlamaChat } from "./evaluator/LlamaChat/LlamaChat.js";
|
|
16
|
-
import {
|
|
16
|
+
import { LlamaCompletion } from "./evaluator/LlamaCompletion.js";
|
|
17
|
+
import { UnsupportedError } from "./utils/UnsupportedError.js";
|
|
17
18
|
import { ChatWrapper } from "./ChatWrapper.js";
|
|
18
19
|
import { EmptyChatWrapper } from "./chatWrappers/EmptyChatWrapper.js";
|
|
19
20
|
import { LlamaChatWrapper } from "./chatWrappers/LlamaChatWrapper.js";
|
|
@@ -23,9 +24,10 @@ import { FalconChatWrapper } from "./chatWrappers/FalconChatWrapper.js";
|
|
|
23
24
|
import { AlpacaChatWrapper } from "./chatWrappers/AlpacaChatWrapper.js";
|
|
24
25
|
import { FunctionaryChatWrapper } from "./chatWrappers/FunctionaryChatWrapper.js";
|
|
25
26
|
import { resolveChatWrapperBasedOnModel } from "./chatWrappers/resolveChatWrapperBasedOnModel.js";
|
|
27
|
+
import { resolveChatWrapperBasedOnWrapperTypeName, chatWrapperTypeNames } from "./bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js";
|
|
26
28
|
import { LlamaText, SpecialToken, BuiltinSpecialToken, isLlamaText, tokenizeText } from "./utils/LlamaText.js";
|
|
27
29
|
import { appendUserMessageToChatHistory } from "./utils/appendUserMessageToChatHistory.js";
|
|
28
30
|
import { getModuleVersion } from "./utils/getModuleVersion.js";
|
|
29
31
|
import { isChatModelResponseFunctionCall } from "./types.js";
|
|
30
|
-
export { Llama, getLlama, LlamaLogLevel, NoBinaryFoundError, LlamaModel, LlamaModelTokens, LlamaModelInfillTokens, LlamaGrammar, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, LlamaContext, LlamaContextSequence, LlamaEmbeddingContext, LlamaEmbedding, LlamaChatSession, defineChatSessionFunction, LlamaChat,
|
|
32
|
+
export { Llama, getLlama, LlamaLogLevel, NoBinaryFoundError, LlamaModel, LlamaModelTokens, LlamaModelInfillTokens, LlamaGrammar, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, LlamaContext, LlamaContextSequence, LlamaEmbeddingContext, LlamaEmbedding, LlamaChatSession, defineChatSessionFunction, LlamaChat, LlamaCompletion, UnsupportedError, DisposedError, ChatWrapper, EmptyChatWrapper, LlamaChatWrapper, GeneralChatWrapper, ChatMLChatWrapper, FalconChatWrapper, AlpacaChatWrapper, FunctionaryChatWrapper, resolveChatWrapperBasedOnModel, resolveChatWrapperBasedOnWrapperTypeName, chatWrapperTypeNames, LlamaText, SpecialToken, BuiltinSpecialToken, isLlamaText, tokenizeText, appendUserMessageToChatHistory, getModuleVersion, isChatModelResponseFunctionCall };
|
|
31
33
|
//# sourceMappingURL=index.js.map
|
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,aAAa,EAAC,MAAM,iBAAiB,CAAC;AAC9C,OAAO,EAAC,KAAK,EAAC,MAAM,qBAAqB,CAAC;AAC1C,OAAO,EAAC,QAAQ,EAAe,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAC,kBAAkB,EAAC,MAAM,wCAAwC,CAAC;AAC1E,OAAO,EAAC,aAAa,EAAC,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAC,UAAU,EAAE,sBAAsB,EAA0B,gBAAgB,EAAC,MAAM,2BAA2B,CAAC;AACvH,OAAO,EAAC,YAAY,EAA2B,MAAM,6BAA6B,CAAC;AACnF,OAAO,EAAC,sBAAsB,EAAC,MAAM,uCAAuC,CAAC;AAC7E,OAAO,EAAC,8BAA8B,EAAC,MAAM,2DAA2D,CAAC;AACzG,OAAO,EAAC,2BAA2B,EAAqC,MAAM,4CAA4C,CAAC;AAC3H,OAAO,EAAC,YAAY,EAAE,oBAAoB,EAAC,MAAM,0CAA0C,CAAC;AAC5F,OAAO,EACH,qBAAqB,EAAqC,cAAc,EAC3E,MAAM,sCAAsC,CAAC;AAM9C,OAAO,EACH,gBAAgB,EAEnB,MAAM,kDAAkD,CAAC;AAC1D,OAAO,EAAC,yBAAyB,EAAC,MAAM,iEAAiE,CAAC;AAC1G,OAAO,EACH,SAAS,EAEZ,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EAAC,
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,aAAa,EAAC,MAAM,iBAAiB,CAAC;AAC9C,OAAO,EAAC,KAAK,EAAC,MAAM,qBAAqB,CAAC;AAC1C,OAAO,EAAC,QAAQ,EAAe,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAC,kBAAkB,EAAC,MAAM,wCAAwC,CAAC;AAC1E,OAAO,EAAC,aAAa,EAAC,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAC,UAAU,EAAE,sBAAsB,EAA0B,gBAAgB,EAAC,MAAM,2BAA2B,CAAC;AACvH,OAAO,EAAC,YAAY,EAA2B,MAAM,6BAA6B,CAAC;AACnF,OAAO,EAAC,sBAAsB,EAAC,MAAM,uCAAuC,CAAC;AAC7E,OAAO,EAAC,8BAA8B,EAAC,MAAM,2DAA2D,CAAC;AACzG,OAAO,EAAC,2BAA2B,EAAqC,MAAM,4CAA4C,CAAC;AAC3H,OAAO,EAAC,YAAY,EAAE,oBAAoB,EAAC,MAAM,0CAA0C,CAAC;AAC5F,OAAO,EACH,qBAAqB,EAAqC,cAAc,EAC3E,MAAM,sCAAsC,CAAC;AAM9C,OAAO,EACH,gBAAgB,EAEnB,MAAM,kDAAkD,CAAC;AAC1D,OAAO,EAAC,yBAAyB,EAAC,MAAM,iEAAiE,CAAC;AAC1G,OAAO,EACH,SAAS,EAEZ,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EACH,eAAe,EAClB,MAAM,gCAAgC,CAAC;AACxC,OAAO,EAAC,gBAAgB,EAAC,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAC,WAAW,EAA2B,MAAM,kBAAkB,CAAC;AACvE,OAAO,EAAC,gBAAgB,EAAC,MAAM,oCAAoC,CAAC;AACpE,OAAO,EAAC,gBAAgB,EAAC,MAAM,oCAAoC,CAAC;AACpE,OAAO,EAAC,kBAAkB,EAAC,MAAM,sCAAsC,CAAC;AACxE,OAAO,EAAC,iBAAiB,EAAC,MAAM,qCAAqC,CAAC;AACtE,OAAO,EAAC,iBAAiB,EAAC,MAAM,qCAAqC,CAAC;AACtE,OAAO,EAAC,iBAAiB,EAAC,MAAM,qCAAqC,CAAC;AACtE,OAAO,EAAC,sBAAsB,EAAC,MAAM,0CAA0C,CAAC;AAChF,OAAO,EAAC,8BAA8B,EAAC,MAAM,kDAAkD,CAAC;AAChG,OAAO,EACH,wCAAwC,EAAE,oBAAoB,EACjE,MAAM,8DAA8D,CAAC;AACtE,OAAO,EACH,SAAS,EAAE,YAAY,EAAE,mBAAmB,EAAE,WAAW,EAAE,YAAY,EAE1E,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAC,8BAA8B,EAAC,MAAM,2CAA2C,CAAC;AACzF,OAAO,EAAC,gBAAgB,EAAC,MAAM,6BAA6B,CAAC;AAE7D,OAAO,EAGS,+BAA+B,EAC9C,MAAM,YAAY,CAAC;AAOpB,OAAO,EACH,KAAK,EACL,QAAQ,EAER,aAAa,EACb,kBAAkB,EAClB,UAAU,EACV,gBAAgB,EAChB,sBAAsB,EAEtB,YAAY,EAEZ,sBAAsB,EACtB,8BAA8B,EAC9B,2BAA2B,EAE3B,YAAY,EACZ,oBAAoB,EAWpB,qBAAqB,EAErB,cAAc,EAEd,gBAAgB,EAChB,yBAAyB,EAKzB,SAAS,EAOT,eAAe,EAIf,gBAAgB,EAChB,aAAa,EACb,WAAW,EAEX,gBAAgB,EAChB,gBAAgB,EAChB,kBAAkB,EAClB,iBAAiB,EACjB,iBAAiB,EACjB,iBAAiB,EACjB,sBAAsB,EACtB,8BAA8B,EAC9B,wCAAwC,EACxC,oBAAoB,EAEpB,SAAS,EACT,YAAY,EACZ,
mBAAmB,EACnB,WAAW,EACX,YAAY,EAIZ,8BAA8B,EAC9B,gBAAgB,EAUhB,+BAA+B,EAUlC,CAAC"}
|
package/dist/types.d.ts
CHANGED
|
@@ -43,3 +43,34 @@ export type ChatSessionModelFunction<Params extends GbnfJsonSchema | undefined =
|
|
|
43
43
|
readonly handler: (params: GbnfJsonSchemaToType<Params>) => any;
|
|
44
44
|
};
|
|
45
45
|
export declare function isChatModelResponseFunctionCall(item: ChatModelResponse["response"][number]): item is ChatModelFunctionCall;
|
|
46
|
+
export type LLamaContextualRepeatPenalty = {
|
|
47
|
+
/**
|
|
48
|
+
* Number of recent tokens generated by the model to apply penalties to repetition of.
|
|
49
|
+
* Defaults to `64`.
|
|
50
|
+
*/
|
|
51
|
+
lastTokens?: number;
|
|
52
|
+
punishTokensFilter?: (tokens: Token[]) => Token[];
|
|
53
|
+
/**
|
|
54
|
+
* Penalize new line tokens.
|
|
55
|
+
* Enabled by default.
|
|
56
|
+
*/
|
|
57
|
+
penalizeNewLine?: boolean;
|
|
58
|
+
/**
|
|
59
|
+
* The relative amount to lower the probability of the tokens in `punishTokens` by
|
|
60
|
+
* Defaults to `1.1`.
|
|
61
|
+
* Set to `1` to disable.
|
|
62
|
+
*/
|
|
63
|
+
penalty?: number;
|
|
64
|
+
/**
|
|
65
|
+
* For n time a token is in the `punishTokens` array, lower its probability by `n * frequencyPenalty`
|
|
66
|
+
* Disabled by default (`0`).
|
|
67
|
+
* Set to a value between `0` and `1` to enable.
|
|
68
|
+
*/
|
|
69
|
+
frequencyPenalty?: number;
|
|
70
|
+
/**
|
|
71
|
+
* Lower the probability of all the tokens in the `punishTokens` array by `presencePenalty`
|
|
72
|
+
* Disabled by default (`0`).
|
|
73
|
+
* Set to a value between `0` and `1` to enable.
|
|
74
|
+
*/
|
|
75
|
+
presencePenalty?: number;
|
|
76
|
+
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"UnsupportedError.js","sourceRoot":"","sources":["../../src/utils/UnsupportedError.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,gBAAiB,SAAQ,KAAK;IACvC,gBAAgB;IAChB,YAAmB,UAAkB,kBAAkB;QACnD,KAAK,CAAC,OAAO,CAAC,CAAC;IACnB,CAAC;CACJ"}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { Token, Tokenizer } from "../types.js";
import { StopGenerationDetector } from "./StopGenerationDetector.js";
/**
 * Resolves the queued tokens that were generated before a stop trigger fired,
 * choosing between the raw token form and the tokenized text form of the
 * partially-free content.
 *
 * @param triggeredStops - result of `StopGenerationDetector.prototype.getTriggeredStops()`
 * @param partiallyFreeTokens - the queued content, available both as raw tokens and as text
 * @param tokenizer - used to convert the text form back into tokens
 * @returns the queued tokens preceding the stop trigger
 */
export declare function getQueuedTokensBeforeStopTrigger(triggeredStops: ReturnType<typeof StopGenerationDetector["prototype"]["getTriggeredStops"]>, partiallyFreeTokens: {
    tokens: Token[];
    text: string;
}, tokenizer: Tokenizer): Token[];
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * Resolves the queued tokens generated before a stop trigger fired, choosing between
 * the raw token form and the tokenized text form of the partially-free content.
 *
 * When only triggers of one kind fired (token-first vs. string-first), the matching
 * representation is returned directly; otherwise the two representations are compared
 * and the earlier-matching one wins.
 *
 * @param triggeredStops - triggered stops from `StopGenerationDetector`'s `getTriggeredStops()`
 * @param partiallyFreeTokens - the queued content, available both as `tokens` and as `text`
 * @param tokenizer - converts the text form back into tokens
 * @returns the queued tokens that precede the stop trigger
 */
export function getQueuedTokensBeforeStopTrigger(triggeredStops, partiallyFreeTokens, tokenizer) {
    // Trivial cases: at most one representation is non-empty.
    if (partiallyFreeTokens.tokens.length === 0 && partiallyFreeTokens.text.length === 0)
        return [];
    else if (partiallyFreeTokens.tokens.length !== 0 && partiallyFreeTokens.text.length === 0)
        return partiallyFreeTokens.tokens;
    else if (partiallyFreeTokens.tokens.length === 0 && partiallyFreeTokens.text.length !== 0)
        return tokenizer(partiallyFreeTokens.text);
    const triggerThatStartsWithStringIndex = triggeredStops.findIndex((trigger) => trigger.stopTrigger.length > 0 && typeof trigger.stopTrigger[0] === "string");
    const triggerThatStartsWithTokenIndex = triggeredStops.findIndex((trigger) => trigger.stopTrigger.length > 0 && typeof trigger.stopTrigger[0] !== "string");
    // Bug fix: `findIndex` returns `-1` when no element matches, so "found" is `>= 0`.
    // The previous `> 0` checks treated a trigger found at index 0 as absent, causing
    // the wrong representation to be returned when the only trigger was the first one.
    if (triggerThatStartsWithTokenIndex >= 0 && triggerThatStartsWithStringIndex < 0)
        return partiallyFreeTokens.tokens;
    else if (triggerThatStartsWithStringIndex >= 0 && triggerThatStartsWithTokenIndex < 0)
        return tokenizer(partiallyFreeTokens.text);
    // Both kinds of triggers fired (or neither): if both representations tokenize
    // identically, either works; otherwise prefer the kind that triggered earlier.
    const stringTokens = tokenizer(partiallyFreeTokens.text);
    if (stringTokens.length === partiallyFreeTokens.tokens.length &&
        stringTokens.every((value, index) => value === partiallyFreeTokens.tokens[index]))
        return stringTokens;
    else if (triggerThatStartsWithStringIndex < triggerThatStartsWithTokenIndex)
        return stringTokens;
    return partiallyFreeTokens.tokens;
}
|
|
22
|
+
//# sourceMappingURL=getQueuedTokensBeforeStopTrigger.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"getQueuedTokensBeforeStopTrigger.js","sourceRoot":"","sources":["../../src/utils/getQueuedTokensBeforeStopTrigger.ts"],"names":[],"mappings":"AAGA,MAAM,UAAU,gCAAgC,CAC5C,cAA2F,EAC3F,mBAGC,EACD,SAAoB;IAEpB,IAAI,mBAAmB,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;QAChF,OAAO,EAAE,CAAC;SACT,IAAI,mBAAmB,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;QACrF,OAAO,mBAAmB,CAAC,MAAM,CAAC;SACjC,IAAI,mBAAmB,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;QACrF,OAAO,SAAS,CAAC,mBAAmB,CAAC,IAAI,CAAC,CAAC;IAE/C,MAAM,gCAAgC,GAAG,cAAc,CAAC,SAAS,CAC7D,CAAC,OAAO,EAAE,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,QAAQ,CAC5F,CAAC;IACF,MAAM,+BAA+B,GAAG,cAAc,CAAC,SAAS,CAC5D,CAAC,OAAO,EAAE,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,QAAQ,CAC5F,CAAC;IAEF,IAAI,+BAA+B,GAAG,CAAC,IAAI,gCAAgC,GAAG,CAAC;QAC3E,OAAO,mBAAmB,CAAC,MAAM,CAAC;SACjC,IAAI,gCAAgC,GAAG,CAAC,IAAI,+BAA+B,GAAG,CAAC;QAChF,OAAO,SAAS,CAAC,mBAAmB,CAAC,IAAI,CAAC,CAAC;IAE/C,MAAM,YAAY,GAAG,SAAS,CAAC,mBAAmB,CAAC,IAAI,CAAC,CAAC;IACzD,IAAI,YAAY,CAAC,MAAM,KAAK,mBAAmB,CAAC,MAAM,CAAC,MAAM;QACzD,YAAY,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,KAAK,KAAK,mBAAmB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAEjF,OAAO,YAAY,CAAC;SACnB,IAAI,gCAAgC,GAAG,+BAA+B;QACvE,OAAO,YAAY,CAAC;IAExB,OAAO,mBAAmB,CAAC,MAAM,CAAC;AACtC,CAAC"}
|
package/llama/addon.cpp
CHANGED
|
@@ -87,6 +87,26 @@ Napi::Value getGpuVramInfo(const Napi::CallbackInfo& info) {
|
|
|
87
87
|
return result;
|
|
88
88
|
}
|
|
89
89
|
|
|
90
|
+
// Converts a llama token to a JS number for returning to JavaScript.
// Returns -1 when the token's type is undefined or unknown in the model's
// vocabulary, so JS callers can detect that the token is not available.
static Napi::Value getNapiToken(const Napi::CallbackInfo& info, llama_model* model, llama_token token) {
    auto tokenType = llama_token_get_type(model, token);

    if (tokenType == LLAMA_TOKEN_TYPE_UNDEFINED || tokenType == LLAMA_TOKEN_TYPE_UNKNOWN) {
        return Napi::Number::From(info.Env(), -1);
    }

    return Napi::Number::From(info.Env(), token);
}
|
|
99
|
+
|
|
100
|
+
// Converts a llama token to a JS number, but only if it is a control token
// (used for special tokens such as BOS/EOS/EOT). Returns -1 for any
// non-control token so JS callers can detect its absence.
static Napi::Value getNapiControlToken(const Napi::CallbackInfo& info, llama_model* model, llama_token token) {
    auto tokenType = llama_token_get_type(model, token);

    if (tokenType != LLAMA_TOKEN_TYPE_CONTROL) {
        return Napi::Number::From(info.Env(), -1);
    }

    return Napi::Number::From(info.Env(), token);
}
|
|
109
|
+
|
|
90
110
|
class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
91
111
|
public:
|
|
92
112
|
llama_model_params model_params;
|
|
@@ -119,7 +139,6 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
119
139
|
}
|
|
120
140
|
}
|
|
121
141
|
|
|
122
|
-
llama_backend_init(false);
|
|
123
142
|
model = llama_load_model_from_file(modelPath.c_str(), model_params);
|
|
124
143
|
|
|
125
144
|
if (model == NULL) {
|
|
@@ -203,6 +222,15 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
203
222
|
return Napi::Number::From(info.Env(), llama_n_ctx_train(model));
|
|
204
223
|
}
|
|
205
224
|
|
|
225
|
+
    // Returns the model's embedding vector size (llama_n_embd) to JavaScript.
    Napi::Value GetEmbeddingVectorSize(const Napi::CallbackInfo& info) {
        if (disposed) {
            // NOTE(review): message says "Context" but this guards the model object —
            // looks copy-pasted from the context class; kept as-is to match siblings.
            Napi::Error::New(info.Env(), "Context is disposed").ThrowAsJavaScriptException();
            return info.Env().Undefined();
        }

        return Napi::Number::From(info.Env(), llama_n_embd(model));
    }
|
|
233
|
+
|
|
206
234
|
Napi::Value GetTotalSize(const Napi::CallbackInfo& info) {
|
|
207
235
|
if (disposed) {
|
|
208
236
|
Napi::Error::New(info.Env(), "Context is disposed").ThrowAsJavaScriptException();
|
|
@@ -239,7 +267,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
239
267
|
return info.Env().Undefined();
|
|
240
268
|
}
|
|
241
269
|
|
|
242
|
-
return
|
|
270
|
+
return getNapiControlToken(info, model, llama_token_bos(model));
|
|
243
271
|
}
|
|
244
272
|
Napi::Value TokenEos(const Napi::CallbackInfo& info) {
|
|
245
273
|
if (disposed) {
|
|
@@ -247,7 +275,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
247
275
|
return info.Env().Undefined();
|
|
248
276
|
}
|
|
249
277
|
|
|
250
|
-
return
|
|
278
|
+
return getNapiControlToken(info, model, llama_token_eos(model));
|
|
251
279
|
}
|
|
252
280
|
Napi::Value TokenNl(const Napi::CallbackInfo& info) {
|
|
253
281
|
if (disposed) {
|
|
@@ -255,7 +283,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
255
283
|
return info.Env().Undefined();
|
|
256
284
|
}
|
|
257
285
|
|
|
258
|
-
return
|
|
286
|
+
return getNapiToken(info, model, llama_token_nl(model));
|
|
259
287
|
}
|
|
260
288
|
Napi::Value PrefixToken(const Napi::CallbackInfo& info) {
|
|
261
289
|
if (disposed) {
|
|
@@ -263,7 +291,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
263
291
|
return info.Env().Undefined();
|
|
264
292
|
}
|
|
265
293
|
|
|
266
|
-
return
|
|
294
|
+
return getNapiControlToken(info, model, llama_token_prefix(model));
|
|
267
295
|
}
|
|
268
296
|
Napi::Value MiddleToken(const Napi::CallbackInfo& info) {
|
|
269
297
|
if (disposed) {
|
|
@@ -271,7 +299,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
271
299
|
return info.Env().Undefined();
|
|
272
300
|
}
|
|
273
301
|
|
|
274
|
-
return
|
|
302
|
+
return getNapiControlToken(info, model, llama_token_middle(model));
|
|
275
303
|
}
|
|
276
304
|
Napi::Value SuffixToken(const Napi::CallbackInfo& info) {
|
|
277
305
|
if (disposed) {
|
|
@@ -279,7 +307,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
279
307
|
return info.Env().Undefined();
|
|
280
308
|
}
|
|
281
309
|
|
|
282
|
-
return
|
|
310
|
+
return getNapiControlToken(info, model, llama_token_suffix(model));
|
|
283
311
|
}
|
|
284
312
|
Napi::Value EotToken(const Napi::CallbackInfo& info) {
|
|
285
313
|
if (disposed) {
|
|
@@ -287,7 +315,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
287
315
|
return info.Env().Undefined();
|
|
288
316
|
}
|
|
289
317
|
|
|
290
|
-
return
|
|
318
|
+
return getNapiControlToken(info, model, llama_token_eot(model));
|
|
291
319
|
}
|
|
292
320
|
Napi::Value GetTokenString(const Napi::CallbackInfo& info) {
|
|
293
321
|
if (disposed) {
|
|
@@ -308,6 +336,29 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
308
336
|
return Napi::String::New(info.Env(), ss.str());
|
|
309
337
|
}
|
|
310
338
|
|
|
339
|
+
    // Returns the vocabulary type of the given token (a LLAMA_TOKEN_TYPE_* value)
    // as a JS number. A missing or non-numeric argument yields
    // LLAMA_TOKEN_TYPE_UNDEFINED instead of throwing.
    Napi::Value GetTokenType(const Napi::CallbackInfo& info) {
        if (disposed) {
            Napi::Error::New(info.Env(), "Context is disposed").ThrowAsJavaScriptException();
            return info.Env().Undefined();
        }

        if (info[0].IsNumber() == false) {
            return Napi::Number::From(info.Env(), int32_t(LLAMA_TOKEN_TYPE_UNDEFINED));
        }

        int token = info[0].As<Napi::Number>().Int32Value();
        auto tokenType = llama_token_get_type(model, token);

        return Napi::Number::From(info.Env(), int32_t(tokenType));
    }
|
|
354
|
+
Napi::Value ShouldPrependBosToken(const Napi::CallbackInfo& info) {
|
|
355
|
+
const int addBos = llama_add_bos_token(model);
|
|
356
|
+
|
|
357
|
+
bool shouldPrependBos = addBos != -1 ? bool(addBos) : (llama_vocab_type(model) == LLAMA_VOCAB_TYPE_SPM);
|
|
358
|
+
|
|
359
|
+
return Napi::Boolean::New(info.Env(), shouldPrependBos);
|
|
360
|
+
}
|
|
361
|
+
|
|
311
362
|
static void init(Napi::Object exports) {
|
|
312
363
|
exports.Set(
|
|
313
364
|
"AddonModel",
|
|
@@ -318,6 +369,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
318
369
|
InstanceMethod("tokenize", &AddonModel::Tokenize),
|
|
319
370
|
InstanceMethod("detokenize", &AddonModel::Detokenize),
|
|
320
371
|
InstanceMethod("getTrainContextSize", &AddonModel::GetTrainContextSize),
|
|
372
|
+
InstanceMethod("getEmbeddingVectorSize", &AddonModel::GetEmbeddingVectorSize),
|
|
321
373
|
InstanceMethod("getTotalSize", &AddonModel::GetTotalSize),
|
|
322
374
|
InstanceMethod("getTotalParameters", &AddonModel::GetTotalParameters),
|
|
323
375
|
InstanceMethod("getModelDescription", &AddonModel::GetModelDescription),
|
|
@@ -329,6 +381,8 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
|
|
|
329
381
|
InstanceMethod("suffixToken", &AddonModel::SuffixToken),
|
|
330
382
|
InstanceMethod("eotToken", &AddonModel::EotToken),
|
|
331
383
|
InstanceMethod("getTokenString", &AddonModel::GetTokenString),
|
|
384
|
+
InstanceMethod("getTokenType", &AddonModel::GetTokenType),
|
|
385
|
+
InstanceMethod("shouldPrependBosToken", &AddonModel::ShouldPrependBosToken),
|
|
332
386
|
InstanceMethod("dispose", &AddonModel::Dispose),
|
|
333
387
|
}
|
|
334
388
|
)
|
|
@@ -993,7 +1047,7 @@ Napi::Value setLoggerLogLevel(const Napi::CallbackInfo& info) {
|
|
|
993
1047
|
}
|
|
994
1048
|
|
|
995
1049
|
Napi::Object registerCallback(Napi::Env env, Napi::Object exports) {
|
|
996
|
-
llama_backend_init(
|
|
1050
|
+
llama_backend_init();
|
|
997
1051
|
exports.DefineProperties({
|
|
998
1052
|
Napi::PropertyDescriptor::Function("systemInfo", systemInfo),
|
|
999
1053
|
Napi::PropertyDescriptor::Function("setLogger", setLogger),
|
package/llama/gitRelease.bundle
CHANGED
|
Binary file
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"arm64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
|
|
1
|
+
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"arm64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
|
|
Binary file
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"armv7l","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
|
|
1
|
+
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"armv7l","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
|
|
Binary file
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"x64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
|
|
1
|
+
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"x64","computeLayers":{"metal":false,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
|
|
Binary file
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"x64","computeLayers":{"metal":false,"cuda":true},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
|
|
1
|
+
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","arch":"x64","computeLayers":{"metal":false,"cuda":true},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
|
|
Binary file
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","arch":"arm64","computeLayers":{"metal":true,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"
|
|
1
|
+
{"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","arch":"arm64","computeLayers":{"metal":true,"cuda":false},"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2174"}}}
|