node-llama-cpp 3.0.0-beta.10 → 3.0.0-beta.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. package/README.md +4 -4
  2. package/dist/bindings/AddonTypes.d.ts +3 -0
  3. package/dist/bindings/Llama.d.ts +1 -0
  4. package/dist/bindings/Llama.js +7 -1
  5. package/dist/bindings/Llama.js.map +1 -1
  6. package/dist/bindings/getLlama.d.ts +24 -1
  7. package/dist/bindings/getLlama.js +10 -4
  8. package/dist/bindings/getLlama.js.map +1 -1
  9. package/dist/bindings/types.d.ts +1 -0
  10. package/dist/bindings/types.js.map +1 -1
  11. package/dist/bindings/utils/compileLLamaCpp.js +2 -0
  12. package/dist/bindings/utils/compileLLamaCpp.js.map +1 -1
  13. package/dist/bindings/utils/getBuildFolderNameForBuildOptions.js +2 -0
  14. package/dist/bindings/utils/getBuildFolderNameForBuildOptions.js.map +1 -1
  15. package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.d.ts +26 -0
  16. package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js +43 -0
  17. package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js.map +1 -0
  18. package/dist/bindings/utils/resolveCustomCmakeOptions.js +2 -0
  19. package/dist/bindings/utils/resolveCustomCmakeOptions.js.map +1 -1
  20. package/dist/cli/cli.js +4 -0
  21. package/dist/cli/cli.js.map +1 -1
  22. package/dist/cli/commands/BuildCommand.d.ts +2 -1
  23. package/dist/cli/commands/BuildCommand.js +11 -9
  24. package/dist/cli/commands/BuildCommand.js.map +1 -1
  25. package/dist/cli/commands/ChatCommand.d.ts +2 -2
  26. package/dist/cli/commands/ChatCommand.js +3 -39
  27. package/dist/cli/commands/ChatCommand.js.map +1 -1
  28. package/dist/cli/commands/CompleteCommand.d.ts +25 -0
  29. package/dist/cli/commands/CompleteCommand.js +278 -0
  30. package/dist/cli/commands/CompleteCommand.js.map +1 -0
  31. package/dist/cli/commands/DebugCommand.js +16 -13
  32. package/dist/cli/commands/DebugCommand.js.map +1 -1
  33. package/dist/cli/commands/DownloadCommand.d.ts +2 -1
  34. package/dist/cli/commands/DownloadCommand.js +11 -9
  35. package/dist/cli/commands/DownloadCommand.js.map +1 -1
  36. package/dist/cli/commands/InfillCommand.d.ts +27 -0
  37. package/dist/cli/commands/InfillCommand.js +316 -0
  38. package/dist/cli/commands/InfillCommand.js.map +1 -0
  39. package/dist/cli/utils/logEnabledComputeLayers.d.ts +8 -0
  40. package/dist/cli/utils/logEnabledComputeLayers.js +11 -0
  41. package/dist/cli/utils/logEnabledComputeLayers.js.map +1 -0
  42. package/dist/config.d.ts +1 -0
  43. package/dist/config.js +5 -2
  44. package/dist/config.js.map +1 -1
  45. package/dist/consts.d.ts +1 -0
  46. package/dist/consts.js +2 -0
  47. package/dist/consts.js.map +1 -0
  48. package/dist/evaluator/LlamaChat/LlamaChat.d.ts +2 -33
  49. package/dist/evaluator/LlamaChat/LlamaChat.js +7 -28
  50. package/dist/evaluator/LlamaChat/LlamaChat.js.map +1 -1
  51. package/dist/evaluator/LlamaChatSession/LlamaChatSession.js +1 -1
  52. package/dist/evaluator/LlamaChatSession/LlamaChatSession.js.map +1 -1
  53. package/dist/evaluator/LlamaCompletion.d.ts +148 -0
  54. package/dist/evaluator/LlamaCompletion.js +402 -0
  55. package/dist/evaluator/LlamaCompletion.js.map +1 -0
  56. package/dist/evaluator/LlamaContext/LlamaContext.js +6 -2
  57. package/dist/evaluator/LlamaContext/LlamaContext.js.map +1 -1
  58. package/dist/evaluator/LlamaModel.d.ts +10 -1
  59. package/dist/evaluator/LlamaModel.js +33 -3
  60. package/dist/evaluator/LlamaModel.js.map +1 -1
  61. package/dist/index.d.ts +6 -4
  62. package/dist/index.js +4 -2
  63. package/dist/index.js.map +1 -1
  64. package/dist/types.d.ts +31 -0
  65. package/dist/utils/UnsupportedError.d.ts +2 -0
  66. package/dist/utils/UnsupportedError.js +7 -0
  67. package/dist/utils/UnsupportedError.js.map +1 -0
  68. package/dist/utils/gbnfJson/terminals/GbnfArray.js.map +1 -1
  69. package/dist/utils/gbnfJson/terminals/GbnfBoolean.d.ts +1 -1
  70. package/dist/utils/gbnfJson/terminals/GbnfBoolean.js.map +1 -1
  71. package/dist/utils/gbnfJson/terminals/GbnfBooleanValue.js.map +1 -1
  72. package/dist/utils/gbnfJson/terminals/GbnfGrammar.js.map +1 -1
  73. package/dist/utils/gbnfJson/terminals/GbnfNull.d.ts +1 -1
  74. package/dist/utils/gbnfJson/terminals/GbnfNull.js.map +1 -1
  75. package/dist/utils/gbnfJson/terminals/GbnfNumber.d.ts +1 -1
  76. package/dist/utils/gbnfJson/terminals/GbnfNumber.js.map +1 -1
  77. package/dist/utils/gbnfJson/terminals/GbnfNumberValue.js.map +1 -1
  78. package/dist/utils/gbnfJson/terminals/GbnfObjectMap.js.map +1 -1
  79. package/dist/utils/gbnfJson/terminals/GbnfOr.js.map +1 -1
  80. package/dist/utils/gbnfJson/terminals/GbnfString.d.ts +1 -1
  81. package/dist/utils/gbnfJson/terminals/GbnfString.js.map +1 -1
  82. package/dist/utils/gbnfJson/terminals/GbnfStringValue.js.map +1 -1
  83. package/dist/utils/gbnfJson/terminals/GbnfVerbatimText.js.map +1 -1
  84. package/dist/utils/gbnfJson/terminals/GbnfWhitespace.d.ts +1 -1
  85. package/dist/utils/gbnfJson/terminals/GbnfWhitespace.js.map +1 -1
  86. package/dist/utils/getBuildDefaults.d.ts +1 -0
  87. package/dist/utils/getBuildDefaults.js +3 -2
  88. package/dist/utils/getBuildDefaults.js.map +1 -1
  89. package/dist/utils/getQueuedTokensBeforeStopTrigger.d.ts +6 -0
  90. package/dist/utils/getQueuedTokensBeforeStopTrigger.js +22 -0
  91. package/dist/utils/getQueuedTokensBeforeStopTrigger.js.map +1 -0
  92. package/llama/CMakeLists.txt +20 -0
  93. package/llama/addon.cpp +97 -12
  94. package/llama/binariesGithubRelease.json +1 -1
  95. package/llama/gitRelease.bundle +0 -0
  96. package/llama/gpuInfo/cuda-gpu-info.cu +5 -5
  97. package/llama/gpuInfo/cuda-gpu-info.h +2 -2
  98. package/llama/gpuInfo/vulkan-gpu-info.cpp +65 -0
  99. package/llama/gpuInfo/vulkan-gpu-info.h +7 -0
  100. package/llama/llama.cpp.info.json +1 -1
  101. package/llamaBins/linux-arm64/.buildMetadata.json +1 -1
  102. package/llamaBins/linux-arm64/llama-addon.node +0 -0
  103. package/llamaBins/linux-armv7l/.buildMetadata.json +1 -1
  104. package/llamaBins/linux-armv7l/llama-addon.node +0 -0
  105. package/llamaBins/linux-x64/.buildMetadata.json +1 -1
  106. package/llamaBins/linux-x64/llama-addon.node +0 -0
  107. package/llamaBins/linux-x64-cuda/.buildMetadata.json +1 -1
  108. package/llamaBins/linux-x64-cuda/llama-addon.node +0 -0
  109. package/llamaBins/linux-x64-vulkan/.buildMetadata.json +1 -0
  110. package/llamaBins/linux-x64-vulkan/llama-addon.node +0 -0
  111. package/llamaBins/mac-arm64-metal/.buildMetadata.json +1 -1
  112. package/llamaBins/mac-arm64-metal/ggml-metal.metal +1035 -132
  113. package/llamaBins/mac-arm64-metal/llama-addon.node +0 -0
  114. package/llamaBins/mac-x64/.buildMetadata.json +1 -1
  115. package/llamaBins/mac-x64/llama-addon.node +0 -0
  116. package/llamaBins/win-x64/.buildMetadata.json +1 -1
  117. package/llamaBins/win-x64/llama-addon.exp +0 -0
  118. package/llamaBins/win-x64/llama-addon.lib +0 -0
  119. package/llamaBins/win-x64/llama-addon.node +0 -0
  120. package/llamaBins/win-x64-cuda/.buildMetadata.json +1 -1
  121. package/llamaBins/win-x64-cuda/llama-addon.exp +0 -0
  122. package/llamaBins/win-x64-cuda/llama-addon.lib +0 -0
  123. package/llamaBins/win-x64-cuda/llama-addon.node +0 -0
  124. package/llamaBins/win-x64-vulkan/.buildMetadata.json +1 -0
  125. package/llamaBins/win-x64-vulkan/llama-addon.exp +0 -0
  126. package/llamaBins/win-x64-vulkan/llama-addon.lib +0 -0
  127. package/llamaBins/win-x64-vulkan/llama-addon.node +0 -0
  128. package/package.json +3 -2
  129. package/dist/AbortError.d.ts +0 -2
  130. package/dist/AbortError.js +0 -7
  131. package/dist/AbortError.js.map +0 -1
@@ -0,0 +1,27 @@
1
+ import { CommandModule } from "yargs";
2
+ type InfillCommand = {
3
+ model: string;
4
+ systemInfo: boolean;
5
+ prefix?: string;
6
+ prefixFile?: string;
7
+ suffix?: string;
8
+ suffixFile?: string;
9
+ contextSize: number;
10
+ batchSize?: number;
11
+ threads: number;
12
+ temperature: number;
13
+ minP: number;
14
+ topK: number;
15
+ topP: number;
16
+ gpuLayers?: number;
17
+ repeatPenalty: number;
18
+ lastTokensRepeatPenalty: number;
19
+ penalizeRepeatingNewLine: boolean;
20
+ repeatFrequencyPenalty?: number;
21
+ repeatPresencePenalty?: number;
22
+ maxTokens: number;
23
+ noInfoLog: boolean;
24
+ printTimings: boolean;
25
+ };
26
+ export declare const InfillCommand: CommandModule<object, InfillCommand>;
27
+ export {};
@@ -0,0 +1,316 @@
1
+ import * as readline from "readline";
2
+ import process from "process";
3
+ import path from "path";
4
+ import chalk from "chalk";
5
+ import fs from "fs-extra";
6
+ import withStatusLogs from "../../utils/withStatusLogs.js";
7
+ import { getLlama } from "../../bindings/getLlama.js";
8
+ import { LlamaModel } from "../../evaluator/LlamaModel.js";
9
+ import { LlamaContext } from "../../evaluator/LlamaContext/LlamaContext.js";
10
+ import { LlamaLogLevel } from "../../bindings/types.js";
11
+ import { LlamaCompletion } from "../../evaluator/LlamaCompletion.js";
12
+ export const InfillCommand = {
13
+ command: "infill",
14
+ describe: "Generate an infill completion for a given suffix and prefix texts",
15
+ builder(yargs) {
16
+ return yargs
17
+ .option("model", {
18
+ alias: "m",
19
+ type: "string",
20
+ demandOption: true,
21
+ description: "Llama model file to use for the chat",
22
+ group: "Required:"
23
+ })
24
+ .option("systemInfo", {
25
+ alias: "i",
26
+ type: "boolean",
27
+ default: false,
28
+ description: "Print llama.cpp system info",
29
+ group: "Optional:"
30
+ })
31
+ .option("prefix", {
32
+ type: "string",
33
+ description: "First prefix text to automatically load",
34
+ group: "Optional:"
35
+ })
36
+ .option("prefixFile", {
37
+ type: "string",
38
+ description: "Path to a file to load prefix text from automatically",
39
+ group: "Optional:"
40
+ })
41
+ .option("suffix", {
42
+ type: "string",
43
+ description: "First suffix text to automatically load. Requires `prefix` or `prefixFile` to be set",
44
+ group: "Optional:"
45
+ })
46
+ .option("suffixFile", {
47
+ type: "string",
48
+ description: "Path to a file to load suffix text from automatically. Requires `prefix` or `prefixFile` to be set",
49
+ group: "Optional:"
50
+ })
51
+ .option("contextSize", {
52
+ alias: "c",
53
+ type: "number",
54
+ default: 1024 * 4,
55
+ description: "Context size to use for the model context",
56
+ group: "Optional:"
57
+ })
58
+ .option("batchSize", {
59
+ alias: "b",
60
+ type: "number",
61
+ description: "Batch size to use for the model context. The default value is the context size",
62
+ group: "Optional:"
63
+ })
64
+ .option("threads", {
65
+ type: "number",
66
+ default: 6,
67
+ description: "Number of threads to use for the evaluation of tokens",
68
+ group: "Optional:"
69
+ })
70
+ .option("temperature", {
71
+ alias: "t",
72
+ type: "number",
73
+ default: 0,
74
+ description: "Temperature is a hyperparameter that controls the randomness of the generated text. It affects the probability distribution of the model's output tokens. A higher temperature (e.g., 1.5) makes the output more random and creative, while a lower temperature (e.g., 0.5) makes the output more focused, deterministic, and conservative. The suggested temperature is 0.8, which provides a balance between randomness and determinism. At the extreme, a temperature of 0 will always pick the most likely next token, leading to identical outputs in each run. Set to `0` to disable.",
75
+ group: "Optional:"
76
+ })
77
+ .option("minP", {
78
+ alias: "mp",
79
+ type: "number",
80
+ default: 0,
81
+ description: "From the next token candidates, discard the percentage of tokens with the lowest probability. For example, if set to `0.05`, 5% of the lowest probability tokens will be discarded. This is useful for generating more high-quality results when using a high temperature. Set to a value between `0` and `1` to enable. Only relevant when `temperature` is set to a value greater than `0`.",
82
+ group: "Optional:"
83
+ })
84
+ .option("topK", {
85
+ alias: "k",
86
+ type: "number",
87
+ default: 40,
88
+ description: "Limits the model to consider only the K most likely next tokens for sampling at each step of sequence generation. An integer number between `1` and the size of the vocabulary. Set to `0` to disable (which uses the full vocabulary). Only relevant when `temperature` is set to a value greater than 0.",
89
+ group: "Optional:"
90
+ })
91
+ .option("topP", {
92
+ alias: "p",
93
+ type: "number",
94
+ default: 0.95,
95
+ description: "Dynamically selects the smallest set of tokens whose cumulative probability exceeds the threshold P, and samples the next token only from this set. A float number between `0` and `1`. Set to `1` to disable. Only relevant when `temperature` is set to a value greater than `0`.",
96
+ group: "Optional:"
97
+ })
98
+ .option("gpuLayers", {
99
+ alias: "gl",
100
+ type: "number",
101
+ description: "number of layers to store in VRAM",
102
+ group: "Optional:"
103
+ })
104
+ .option("repeatPenalty", {
105
+ alias: "rp",
106
+ type: "number",
107
+ default: 1.1,
108
+ description: "Prevent the model from repeating the same token too much. Set to `1` to disable.",
109
+ group: "Optional:"
110
+ })
111
+ .option("lastTokensRepeatPenalty", {
112
+ alias: "rpn",
113
+ type: "number",
114
+ default: 64,
115
+ description: "Number of recent tokens generated by the model to apply penalties to repetition of",
116
+ group: "Optional:"
117
+ })
118
+ .option("penalizeRepeatingNewLine", {
119
+ alias: "rpnl",
120
+ type: "boolean",
121
+ default: true,
122
+ description: "Penalize new line tokens. set \"--no-penalizeRepeatingNewLine\" or \"--no-rpnl\" to disable",
123
+ group: "Optional:"
124
+ })
125
+ .option("repeatFrequencyPenalty", {
126
+ alias: "rfp",
127
+ type: "number",
128
+ description: "For n time a token is in the `punishTokens` array, lower its probability by `n * repeatFrequencyPenalty`. Set to a value between `0` and `1` to enable.",
129
+ group: "Optional:"
130
+ })
131
+ .option("repeatPresencePenalty", {
132
+ alias: "rpp",
133
+ type: "number",
134
+ description: "Lower the probability of all the tokens in the `punishTokens` array by `repeatPresencePenalty`. Set to a value between `0` and `1` to enable.",
135
+ group: "Optional:"
136
+ })
137
+ .option("maxTokens", {
138
+ alias: "mt",
139
+ type: "number",
140
+ default: 0,
141
+ description: "Maximum number of tokens to generate in responses. Set to `0` to disable. Set to `-1` to set to the context size",
142
+ group: "Optional:"
143
+ })
144
+ .option("noInfoLog", {
145
+ alias: "nl",
146
+ type: "boolean",
147
+ default: false,
148
+ description: "Disable llama.cpp info logs",
149
+ group: "Optional:"
150
+ })
151
+ .option("printTimings", {
152
+ alias: "pt",
153
+ type: "boolean",
154
+ default: false,
155
+ description: "Print llama.cpp timings after each response",
156
+ group: "Optional:"
157
+ });
158
+ },
159
+ async handler({ model, systemInfo, prefix, prefixFile, suffix, suffixFile, contextSize, batchSize, threads, temperature, minP, topK, topP, gpuLayers, repeatPenalty, lastTokensRepeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens, noInfoLog, printTimings }) {
160
+ try {
161
+ await RunInfill({
162
+ model, systemInfo, prefix, prefixFile, suffix, suffixFile, contextSize, batchSize,
163
+ threads, temperature, minP, topK, topP, gpuLayers, lastTokensRepeatPenalty,
164
+ repeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens,
165
+ noInfoLog, printTimings
166
+ });
167
+ }
168
+ catch (err) {
169
+ console.error(err);
170
+ process.exit(1);
171
+ }
172
+ }
173
+ };
174
+ async function RunInfill({ model: modelArg, systemInfo, prefix, prefixFile, suffix, suffixFile, contextSize, batchSize, threads, temperature, minP, topK, topP, gpuLayers, lastTokensRepeatPenalty, repeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens, noInfoLog, printTimings }) {
175
+ if (noInfoLog)
176
+ console.info(`${chalk.yellow("Log level:")} warn`);
177
+ const llama = await getLlama("lastBuild", {
178
+ logLevel: noInfoLog
179
+ ? LlamaLogLevel.warn
180
+ : LlamaLogLevel.debug
181
+ });
182
+ const logBatchSize = batchSize != null;
183
+ if (systemInfo)
184
+ console.log(llama.systemInfo);
185
+ if (prefixFile != null && prefixFile !== "") {
186
+ if (prefix != null && prefix !== "")
187
+ console.warn(chalk.yellow("Both `prefix` and `prefixFile` were specified. `prefixFile` will be used."));
188
+ prefix = await fs.readFile(path.resolve(process.cwd(), prefixFile), "utf8");
189
+ }
190
+ if (suffixFile != null && suffixFile !== "") {
191
+ if (suffix != null && suffix !== "")
192
+ console.warn(chalk.yellow("Both `suffix` and `suffixFile` were specified. `suffixFile` will be used."));
193
+ suffix = await fs.readFile(path.resolve(process.cwd(), suffixFile), "utf8");
194
+ }
195
+ if (suffix != null && prefix == null) {
196
+ console.warn(chalk.yellow("Suffix was specified but no prefix was specified. Suffix will be ignored."));
197
+ suffix = undefined;
198
+ }
199
+ if (batchSize == null)
200
+ batchSize = contextSize;
201
+ else if (batchSize > contextSize) {
202
+ console.warn(chalk.yellow("Batch size is greater than the context size. Batch size will be set to the context size."));
203
+ batchSize = contextSize;
204
+ }
205
+ let initialPrefix = prefix ?? null;
206
+ let initialSuffix = suffix ?? null;
207
+ const model = await withStatusLogs({
208
+ loading: chalk.blue("Loading model"),
209
+ success: chalk.blue("Model loaded"),
210
+ fail: chalk.blue("Failed to load model")
211
+ }, async () => new LlamaModel({
212
+ llama,
213
+ modelPath: path.resolve(process.cwd(), modelArg),
214
+ gpuLayers: gpuLayers != null ? gpuLayers : undefined
215
+ }));
216
+ const context = await withStatusLogs({
217
+ loading: chalk.blue("Creating context"),
218
+ success: chalk.blue("Context created"),
219
+ fail: chalk.blue("Failed to create context")
220
+ }, async () => new LlamaContext({
221
+ model,
222
+ contextSize,
223
+ batchSize,
224
+ threads
225
+ }));
226
+ const completion = new LlamaCompletion({
227
+ contextSequence: context.getSequence()
228
+ });
229
+ await new Promise((accept) => setTimeout(accept, 0)); // wait for logs to finish printing
230
+ console.info(`${chalk.yellow("Context size:")} ${context.contextSize}`);
231
+ if (logBatchSize)
232
+ console.info(`${chalk.yellow("Batch size:")} ${context.batchSize}`);
233
+ console.info(`${chalk.yellow("Train context size:")} ${model.trainContextSize}`);
234
+ console.info(`${chalk.yellow("Model type:")} ${model.typeDescription}`);
235
+ console.info(`${chalk.yellow("Repeat penalty:")} ${repeatPenalty} (apply to last ${lastTokensRepeatPenalty} tokens)`);
236
+ if (repeatFrequencyPenalty != null)
237
+ console.info(`${chalk.yellow("Repeat frequency penalty:")} ${repeatFrequencyPenalty}`);
238
+ if (repeatPresencePenalty != null)
239
+ console.info(`${chalk.yellow("Repeat presence penalty:")} ${repeatPresencePenalty}`);
240
+ if (!penalizeRepeatingNewLine)
241
+ console.info(`${chalk.yellow("Penalize repeating new line:")} disabled`);
242
+ // this is for ora to not interfere with readline
243
+ await new Promise(resolve => setTimeout(resolve, 1));
244
+ if (!completion.infillSupported) {
245
+ console.log(chalk.red("Infill is not supported for this model"));
246
+ process.exit(1);
247
+ }
248
+ const replPrefixHistory = [];
249
+ const replSuffixHistory = [];
250
+ async function getInput(name) {
251
+ const rl = readline.createInterface({
252
+ input: process.stdin,
253
+ output: process.stdout,
254
+ history: name === "Prefix"
255
+ ? replPrefixHistory.slice()
256
+ : replSuffixHistory.slice()
257
+ });
258
+ const res = await new Promise((accept) => rl.question(chalk.yellow(name + "> "), accept));
259
+ rl.close();
260
+ return res;
261
+ }
262
+ // eslint-disable-next-line no-constant-condition
263
+ while (true) {
264
+ const prefixInput = initialPrefix != null
265
+ ? initialPrefix
266
+ : await getInput("Prefix");
267
+ if (initialPrefix != null) {
268
+ console.log(chalk.green("Prefix> ") + initialPrefix);
269
+ initialPrefix = null;
270
+ }
271
+ else
272
+ await replPrefixHistory.push(prefixInput);
273
+ if (prefixInput === ".exit")
274
+ break;
275
+ const suffixInput = initialSuffix != null
276
+ ? initialSuffix
277
+ : await getInput("Suffix");
278
+ if (initialSuffix != null) {
279
+ console.log(chalk.green("Suffix> ") + initialSuffix);
280
+ initialSuffix = null;
281
+ }
282
+ else
283
+ await replSuffixHistory.push(suffixInput);
284
+ if (suffixInput === ".exit")
285
+ break;
286
+ process.stdout.write(chalk.yellow("Infill: "));
287
+ const [startColor, endColor] = chalk.blue("MIDDLE").split("MIDDLE");
288
+ process.stdout.write(startColor);
289
+ await completion.generateInfillCompletion(prefixInput, suffixInput, {
290
+ temperature,
291
+ minP,
292
+ topK,
293
+ topP,
294
+ repeatPenalty: {
295
+ penalty: repeatPenalty,
296
+ frequencyPenalty: repeatFrequencyPenalty != null ? repeatFrequencyPenalty : undefined,
297
+ presencePenalty: repeatPresencePenalty != null ? repeatPresencePenalty : undefined,
298
+ penalizeNewLine: penalizeRepeatingNewLine,
299
+ lastTokens: lastTokensRepeatPenalty
300
+ },
301
+ maxTokens: maxTokens === -1
302
+ ? context.contextSize
303
+ : maxTokens <= 0
304
+ ? undefined
305
+ : maxTokens,
306
+ onToken(chunk) {
307
+ process.stdout.write(model.detokenize(chunk));
308
+ }
309
+ });
310
+ process.stdout.write(endColor);
311
+ console.log();
312
+ if (printTimings)
313
+ await context.printTimings();
314
+ }
315
+ }
316
+ //# sourceMappingURL=InfillCommand.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"InfillCommand.js","sourceRoot":"","sources":["../../../src/cli/commands/InfillCommand.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,QAAQ,MAAM,UAAU,CAAC;AACrC,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,MAAM,UAAU,CAAC;AAC1B,OAAO,cAAc,MAAM,+BAA+B,CAAC;AAC3D,OAAO,EAAC,QAAQ,EAAC,MAAM,4BAA4B,CAAC;AACpD,OAAO,EAAC,UAAU,EAAC,MAAM,+BAA+B,CAAC;AACzD,OAAO,EAAC,YAAY,EAAC,MAAM,8CAA8C,CAAC;AAC1E,OAAO,EAAC,aAAa,EAAC,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAC,eAAe,EAAC,MAAM,oCAAoC,CAAC;AA2BnE,MAAM,CAAC,MAAM,aAAa,GAAyC;IAC/D,OAAO,EAAE,QAAQ;IACjB,QAAQ,EAAE,mEAAmE;IAC7E,OAAO,CAAC,KAAK;QACT,OAAO,KAAK;aACP,MAAM,CAAC,OAAO,EAAE;YACb,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,YAAY,EAAE,IAAI;YAClB,WAAW,EAAE,sCAAsC;YACnD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6BAA6B;YAC1C,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,QAAQ,EAAE;YACd,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,yCAAyC;YACtD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,uDAAuD;YACpE,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,QAAQ,EAAE;YACd,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,sFAAsF;YACnG,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,oGAAoG;YACjH,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,aAAa,EAAE;YACnB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,IAAI,GAAG,CAAC;YACjB,WAAW,EAAE,2CAA2C;YACxD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,gFAAgF;YAC7F,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,SAAS,EAAE;YACf,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,uDAAuD;YACpE,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,aAAa,EAAE;YACnB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,6jBAA6jB;YAC1kB,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,+XAA+X;YAC5Y,KAAK,EAAE,WAAW;SACr
B,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,4SAA4S;YACzT,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,IAAI;YACb,WAAW,EAAE,qRAAqR;YAClS,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,mCAAmC;YAChD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,eAAe,EAAE;YACrB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,GAAG;YACZ,WAAW,EAAE,kFAAkF;YAC/F,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,yBAAyB,EAAE;YAC/B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,oFAAoF;YACjG,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,0BAA0B,EAAE;YAChC,KAAK,EAAE,MAAM;YACb,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,IAAI;YACb,WAAW,EAAE,6FAA6F;YAC1G,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,wBAAwB,EAAE;YAC9B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,yJAAyJ;YACtK,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,uBAAuB,EAAE;YAC7B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,+IAA+I;YAC5J,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,kHAAkH;YAC/H,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6BAA6B;YAC1C,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,cAAc,EAAE;YACpB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6CAA6C;YAC1D,KAAK,EAAE,WAAW;SACrB,CAAC,CAAC;IACX,CAAC;IACD,KAAK,CAAC,OAAO,CAAC,EACV,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS,EACjF,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAChC,IAAI,EAAE,SAAS,EAAE,aAAa,EAAE,uBAAuB,EAAE,wBAAwB,EACjF,sBAAsB,EAAE,qBAAqB,EAAE,SAAS,EACxD,SAAS,EAAE,YAAY,EAC1B;QACG,IAAI;YACA,MAAM,SAAS,CAAC;gBACZ,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS;gBACjF,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EAAE,uBAAuB;gBAC1E,aAAa,EAAE,wBAAwB,EAAE,sBAAsB,EAAE,qBA
AqB,EAAE,SAAS;gBACjG,SAAS,EAAE,YAAY;aAC1B,CAAC,CAAC;SACN;QAAC,OAAO,GAAG,EAAE;YACV,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACnB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnB;IACL,CAAC;CACJ,CAAC;AAGF,KAAK,UAAU,SAAS,CAAC,EACrB,KAAK,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS,EAC3F,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EACjD,uBAAuB,EAAE,aAAa,EAAE,wBAAwB,EAAE,sBAAsB,EAAE,qBAAqB,EAC/G,SAAS,EAAE,SAAS,EAAE,YAAY,EACtB;IACZ,IAAI,SAAS;QACT,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;IAEvD,MAAM,KAAK,GAAG,MAAM,QAAQ,CAAC,WAAW,EAAE;QACtC,QAAQ,EAAE,SAAS;YACf,CAAC,CAAC,aAAa,CAAC,IAAI;YACpB,CAAC,CAAC,aAAa,CAAC,KAAK;KAC5B,CAAC,CAAC;IACH,MAAM,YAAY,GAAG,SAAS,IAAI,IAAI,CAAC;IAEvC,IAAI,UAAU;QACV,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IAElC,IAAI,UAAU,IAAI,IAAI,IAAI,UAAU,KAAK,EAAE,EAAE;QACzC,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,KAAK,EAAE;YAC/B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,2EAA2E,CAAC,CAAC,CAAC;QAE5G,MAAM,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,UAAU,CAAC,EAAE,MAAM,CAAC,CAAC;KAC/E;IAED,IAAI,UAAU,IAAI,IAAI,IAAI,UAAU,KAAK,EAAE,EAAE;QACzC,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,KAAK,EAAE;YAC/B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,2EAA2E,CAAC,CAAC,CAAC;QAE5G,MAAM,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,UAAU,CAAC,EAAE,MAAM,CAAC,CAAC;KAC/E;IAED,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,IAAI,IAAI,EAAE;QAClC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,2EAA2E,CAAC,CAAC,CAAC;QACxG,MAAM,GAAG,SAAS,CAAC;KACtB;IAED,IAAI,SAAS,IAAI,IAAI;QACjB,SAAS,GAAG,WAAW,CAAC;SACvB,IAAI,SAAS,GAAG,WAAW,EAAE;QAC9B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,0FAA0F,CAAC,CAAC,CAAC;QACvH,SAAS,GAAG,WAAW,CAAC;KAC3B;IAED,IAAI,aAAa,GAAG,MAAM,IAAI,IAAI,CAAC;IACnC,IAAI,aAAa,GAAG,MAAM,IAAI,IAAI,CAAC;IAEnC,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC;QAC/B,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC;QACpC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;QACnC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,sBAAsB,CAAC;KAC3C,EAAE,KAAK,IAAI,EAAE,CAAC,IAAI,UAAU,
CAAC;QAC1B,KAAK;QACL,SAAS,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,QAAQ,CAAC;QAChD,SAAS,EAAE,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS;KACvD,CAAC,CAAC,CAAC;IACJ,MAAM,OAAO,GAAG,MAAM,cAAc,CAAC;QACjC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC;QACvC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;QACtC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,0BAA0B,CAAC;KAC/C,EAAE,KAAK,IAAI,EAAE,CAAC,IAAI,YAAY,CAAC;QAC5B,KAAK;QACL,WAAW;QACX,SAAS;QACT,OAAO;KACV,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,IAAI,eAAe,CAAC;QACnC,eAAe,EAAE,OAAO,CAAC,WAAW,EAAE;KACzC,CAAC,CAAC;IAEH,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,mCAAmC;IAEzF,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IAExE,IAAI,YAAY;QACZ,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;IAExE,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,qBAAqB,CAAC,IAAI,KAAK,CAAC,gBAAgB,EAAE,CAAC,CAAC;IACjF,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,eAAe,EAAE,CAAC,CAAC;IACxE,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,aAAa,mBAAmB,uBAAuB,UAAU,CAAC,CAAC;IAEtH,IAAI,sBAAsB,IAAI,IAAI;QAC9B,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,2BAA2B,CAAC,IAAI,sBAAsB,EAAE,CAAC,CAAC;IAE3F,IAAI,qBAAqB,IAAI,IAAI;QAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,IAAI,qBAAqB,EAAE,CAAC,CAAC;IAEzF,IAAI,CAAC,wBAAwB;QACzB,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,8BAA8B,CAAC,WAAW,CAAC,CAAC;IAE7E,iDAAiD;IACjD,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;IAErD,IAAI,CAAC,UAAU,CAAC,eAAe,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,wCAAwC,CAAC,CAAC,CAAC;QACjE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACnB;IAED,MAAM,iBAAiB,GAAa,EAAE,CAAC;IACvC,MAAM,iBAAiB,GAAa,EAAE,CAAC;IAEvC,KAAK,UAAU,QAAQ,CAAC,IAAyB;QAC7C,MAAM,EAAE,GAAG,QAAQ,CAAC,eAAe,CAAC;YAChC,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,OAAO,EAAE,IAAI,KAAK,QAAQ;gBACtB,CAAC,CAAC,iBAAiB,
CAAC,KAAK,EAAE;gBAC3B,CAAC,CAAC,iBAAiB,CAAC,KAAK,EAAE;SAClC,CAAC,CAAC;QAEH,MAAM,GAAG,GAAW,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;QAClG,EAAE,CAAC,KAAK,EAAE,CAAC;QAEX,OAAO,GAAG,CAAC;IACf,CAAC;IAED,iDAAiD;IACjD,OAAO,IAAI,EAAE;QACT,MAAM,WAAW,GAAG,aAAa,IAAI,IAAI;YACrC,CAAC,CAAC,aAAa;YACf,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAE/B,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,CAAC;YACrD,aAAa,GAAG,IAAI,CAAC;SACxB;;YACG,MAAM,iBAAiB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAE9C,IAAI,WAAW,KAAK,OAAO;YACvB,MAAM;QAEV,MAAM,WAAW,GAAG,aAAa,IAAI,IAAI;YACrC,CAAC,CAAC,aAAa;YACf,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAE/B,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,CAAC;YACrD,aAAa,GAAG,IAAI,CAAC;SACxB;;YACG,MAAM,iBAAiB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAE9C,IAAI,WAAW,KAAK,OAAO;YACvB,MAAM;QAEV,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;QAE/C,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAEpE,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;QACjC,MAAM,UAAU,CAAC,wBAAwB,CAAC,WAAW,EAAE,WAAW,EAAE;YAChE,WAAW;YACX,IAAI;YACJ,IAAI;YACJ,IAAI;YACJ,aAAa,EAAE;gBACX,OAAO,EAAE,aAAa;gBACtB,gBAAgB,EAAE,sBAAsB,IAAI,IAAI,CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,SAAS;gBACrF,eAAe,EAAE,qBAAqB,IAAI,IAAI,CAAC,CAAC,CAAC,qBAAqB,CAAC,CAAC,CAAC,SAAS;gBAClF,eAAe,EAAE,wBAAwB;gBACzC,UAAU,EAAE,uBAAuB;aACtC;YACD,SAAS,EAAE,SAAS,KAAK,CAAC,CAAC;gBACvB,CAAC,CAAC,OAAO,CAAC,WAAW;gBACrB,CAAC,CAAC,SAAS,IAAI,CAAC;oBACZ,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,SAAS;YACnB,OAAO,CAAC,KAAK;gBACT,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;YAClD,CAAC;SACJ,CAAC,CAAC;QACH,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/B,OAAO,CAAC,GAAG,EAAE,CAAC;QAEd,IAAI,YAAY;YACZ,MAAM,OAAO,CAAC,YAAY,EAAE,CAAC;KACpC;AACL,CAAC"}
@@ -0,0 +1,8 @@
1
+ import { BinaryPlatform } from "../../bindings/utils/getPlatform.js";
2
+ export declare function logEnabledComputeLayers({ metal, cuda, vulkan }: {
3
+ metal: boolean;
4
+ cuda: boolean;
5
+ vulkan: boolean;
6
+ }, { platform }?: {
7
+ platform?: BinaryPlatform;
8
+ }): void;
@@ -0,0 +1,11 @@
1
+ import chalk from "chalk";
2
+ import { getPlatform } from "../../bindings/utils/getPlatform.js";
3
+ export function logEnabledComputeLayers({ metal, cuda, vulkan }, { platform = getPlatform() } = {}) {
4
+ if (metal && platform === "mac")
5
+ console.log(`${chalk.yellow("Metal:")} enabled`);
6
+ if (cuda)
7
+ console.log(`${chalk.yellow("CUDA:")} enabled`);
8
+ if (vulkan)
9
+ console.log(`${chalk.yellow("Vulkan:")} enabled`);
10
+ }
11
+ //# sourceMappingURL=logEnabledComputeLayers.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"logEnabledComputeLayers.js","sourceRoot":"","sources":["../../../src/cli/utils/logEnabledComputeLayers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAiB,WAAW,EAAC,MAAM,qCAAqC,CAAC;AAEhF,MAAM,UAAU,uBAAuB,CAAC,EACpC,KAAK,EAAE,IAAI,EAAE,MAAM,EAGtB,EAAE,EACC,QAAQ,GAAG,WAAW,EAAE,KAGxB,EAAE;IACF,IAAI,KAAK,IAAI,QAAQ,KAAK,KAAK;QAC3B,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;IAErD,IAAI,IAAI;QACJ,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAEpD,IAAI,MAAM;QACN,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;AAC1D,CAAC"}
package/dist/config.d.ts CHANGED
@@ -25,6 +25,7 @@ export declare const defaultLlamaCppGitHubRepo: string;
25
25
  export declare const defaultLlamaCppRelease: string;
26
26
  export declare const defaultLlamaCppMetalSupport: boolean;
27
27
  export declare const defaultLlamaCppCudaSupport: boolean;
28
+ export declare const defaultLlamaCppVulkanSupport: boolean;
28
29
  export declare const defaultLlamaCppDebugLogs: LlamaLogLevel;
29
30
  export declare const defaultSkipDownload: boolean;
30
31
  export declare const defaultXpacksStoreDirectory: string;
package/dist/config.js CHANGED
@@ -46,6 +46,9 @@ export const defaultLlamaCppMetalSupport = env.get("NODE_LLAMA_CPP_METAL")
46
46
  export const defaultLlamaCppCudaSupport = env.get("NODE_LLAMA_CPP_CUDA")
47
47
  .default("false")
48
48
  .asBool();
49
+ export const defaultLlamaCppVulkanSupport = env.get("NODE_LLAMA_CPP_VULKAN")
50
+ .default("false")
51
+ .asBool();
49
52
  export const defaultLlamaCppDebugLogs = env.get("NODE_LLAMA_CPP_LOG_LEVEL")
50
53
  .default(LlamaLogLevel.debug)
51
54
  .asEnum(LlamaLogLevelValues);
@@ -60,8 +63,8 @@ export const defaultXpacksCacheDirectory = env.get("NODE_LLAMA_CPP_XPACKS_CACHE_
60
63
  .asString();
61
64
  export const customCmakeOptionsEnvVarPrefix = "NODE_LLAMA_CPP_CMAKE_OPTION_";
62
65
  export const defaultChatSystemPrompt = "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible.\n" +
63
- "If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. " +
64
- "If you don't know the answer to a question, please don't share false information.";
66
+ "If a question does not make any sense, or is not factually coherent, explain why instead of answering something incorrectly. " +
67
+ "If you don't know the answer to a question, don't share false information.";
65
68
  export const cliBinName = "node-llama-cpp";
66
69
  export const npxRunPrefix = "npx --no ";
67
70
  const documentationUrl = "https://withcatai.github.io/node-llama-cpp";
@@ -1 +1 @@
1
- {"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,aAAa,EAAC,MAAM,KAAK,CAAC;AAClC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,MAAM,MAAM,SAAS,CAAC;AAC7B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,EAAC,wBAAwB,EAAC,MAAM,2CAA2C,CAAC;AACnF,OAAO,EAAC,aAAa,EAAE,mBAAmB,EAAC,MAAM,qBAAqB,CAAC;AAEvE,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAE/D,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AAGrC,MAAM,CAAC,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;AAClE,MAAM,CAAC,MAAM,wBAAwB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,YAAY,CAAC,CAAC;AAChF,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,WAAW,CAAC,CAAC;AAClF,MAAM,CAAC,MAAM,4BAA4B,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,aAAa,CAAC,CAAC;AACrF,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;AAC1F,MAAM,CAAC,MAAM,iBAAiB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;AACxE,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,WAAW,EAAE,UAAU,CAAC,CAAC;AAC5F,MAAM,CAAC,MAAM,qBAAqB,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,EAAE,EAAE,gBAAgB,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACzF,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,EAAE,mCAAmC,CAAC,CAAC;AACvG,MAAM,CAAC,MAAM,qBAAqB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,gBAAgB,CAAC,CAAC;AACjF,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,4BAA4B,CAAC,CAAC;AACjG,MAAM,CAAC,MAAM,6BAA6B,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAC;AAC9F,MAAM,CAAC,MAAM,2BAA2B,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,mBAAmB,CAAC,CAAC;AAC1F,MAAM,CAAC,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;AACjE,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,qBAAqB,GAAG,qBAAqB,CAAC;AAC3D,MAAM,CAAC,MAAM,UA
AU,GAAG,SAAS,CAAC;AACpC,MAAM,CAAC,MAAM,yBAAyB,GAAG,qBAAqB,CAAC;AAC/D,MAAM,CAAC,MAAM,sBAAsB,GAAG,MAAM,wBAAwB,EAAE,CAAC;AAEvE,MAAM,CAAC,MAAM,IAAI,GAAG,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;KAC5B,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,0BAA0B,GAAG,GAAG,CAAC,GAAG,CAAC,mBAAmB,CAAC;KACjE,OAAO,CAAC,EAAE,CAAC;KACX,QAAQ,EAAE,KAAK,EAAE,CAAC;AACvB,MAAM,CAAC,MAAM,yBAAyB,GAAG,GAAG,CAAC,GAAG,CAAC,qBAAqB,CAAC;KAClE,OAAO,CAAC,yBAAyB,CAAC;KAClC,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,sBAAsB,GAAG,GAAG,CAAC,GAAG,CAAC,6BAA6B,CAAC;KACvE,OAAO,CAAC,sBAAsB,CAAC;KAC/B,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC,GAAG,CAAC,sBAAsB,CAAC;KACrE,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;KACrF,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,0BAA0B,GAAG,GAAG,CAAC,GAAG,CAAC,qBAAqB,CAAC;KACnE,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,wBAAwB,GAAG,GAAG,CAAC,GAAG,CAAC,0BAA0B,CAAC;KACtE,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC;KAC5B,MAAM,CAAC,mBAAmB,CAAC,CAAC;AACjC,MAAM,CAAC,MAAM,mBAAmB,GAAG,GAAG,CAAC,GAAG,CAAC,8BAA8B,CAAC;KACrE,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC,GAAG,CAAC,oCAAoC,CAAC;KACnF,OAAO,CAAC,yBAAyB,CAAC;KAClC,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC,GAAG,CAAC,oCAAoC,CAAC;KACnF,OAAO,CAAC,yBAAyB,CAAC;KAClC,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,8BAA8B,GAAG,8BAA8B,CAAC;AAC7E,MAAM,CAAC,MAAM,uBAAuB,GAAG,+FAA+F;IAClI,+HAA+H;IAC/H,mFAAmF,CAAC;AACxF,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAC;AAC3C,MAAM,CAAC,MAAM,YAAY,GAAG,WAAW,CAAC;AAExC,MAAM,gBAAgB,GAAG,4CAA4C,CAAC;AACtE,MAAM,CAAC,MAAM,qBAAqB,GAAG;IACjC,IAAI,EAAE,gBAAgB,GAAG,aAAa;CAChC,CAAC"}
1
+ {"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,aAAa,EAAC,MAAM,KAAK,CAAC;AAClC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,MAAM,MAAM,SAAS,CAAC;AAC7B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,EAAC,wBAAwB,EAAC,MAAM,2CAA2C,CAAC;AACnF,OAAO,EAAC,aAAa,EAAE,mBAAmB,EAAC,MAAM,qBAAqB,CAAC;AAEvE,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAE/D,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AAGrC,MAAM,CAAC,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;AAClE,MAAM,CAAC,MAAM,wBAAwB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,YAAY,CAAC,CAAC;AAChF,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,WAAW,CAAC,CAAC;AAClF,MAAM,CAAC,MAAM,4BAA4B,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,aAAa,CAAC,CAAC;AACrF,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;AAC1F,MAAM,CAAC,MAAM,iBAAiB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;AACxE,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,WAAW,EAAE,UAAU,CAAC,CAAC;AAC5F,MAAM,CAAC,MAAM,qBAAqB,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,EAAE,EAAE,gBAAgB,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACzF,MAAM,CAAC,MAAM,0BAA0B,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,EAAE,mCAAmC,CAAC,CAAC;AACvG,MAAM,CAAC,MAAM,qBAAqB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,gBAAgB,CAAC,CAAC;AACjF,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,4BAA4B,CAAC,CAAC;AACjG,MAAM,CAAC,MAAM,6BAA6B,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAC;AAC9F,MAAM,CAAC,MAAM,2BAA2B,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,mBAAmB,CAAC,CAAC;AAC1F,MAAM,CAAC,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;AACjE,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,qBAAqB,GAAG,qBAAqB,CAAC;AAC3D,MAAM,CAAC,MAAM,UA
AU,GAAG,SAAS,CAAC;AACpC,MAAM,CAAC,MAAM,yBAAyB,GAAG,qBAAqB,CAAC;AAC/D,MAAM,CAAC,MAAM,sBAAsB,GAAG,MAAM,wBAAwB,EAAE,CAAC;AAEvE,MAAM,CAAC,MAAM,IAAI,GAAG,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;KAC5B,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,0BAA0B,GAAG,GAAG,CAAC,GAAG,CAAC,mBAAmB,CAAC;KACjE,OAAO,CAAC,EAAE,CAAC;KACX,QAAQ,EAAE,KAAK,EAAE,CAAC;AACvB,MAAM,CAAC,MAAM,yBAAyB,GAAG,GAAG,CAAC,GAAG,CAAC,qBAAqB,CAAC;KAClE,OAAO,CAAC,yBAAyB,CAAC;KAClC,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,sBAAsB,GAAG,GAAG,CAAC,GAAG,CAAC,6BAA6B,CAAC;KACvE,OAAO,CAAC,sBAAsB,CAAC;KAC/B,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC,GAAG,CAAC,sBAAsB,CAAC;KACrE,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;KACrF,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,0BAA0B,GAAG,GAAG,CAAC,GAAG,CAAC,qBAAqB,CAAC;KACnE,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,4BAA4B,GAAG,GAAG,CAAC,GAAG,CAAC,uBAAuB,CAAC;KACvE,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,wBAAwB,GAAG,GAAG,CAAC,GAAG,CAAC,0BAA0B,CAAC;KACtE,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC;KAC5B,MAAM,CAAC,mBAAmB,CAAC,CAAC;AACjC,MAAM,CAAC,MAAM,mBAAmB,GAAG,GAAG,CAAC,GAAG,CAAC,8BAA8B,CAAC;KACrE,OAAO,CAAC,OAAO,CAAC;KAChB,MAAM,EAAE,CAAC;AACd,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC,GAAG,CAAC,oCAAoC,CAAC;KACnF,OAAO,CAAC,yBAAyB,CAAC;KAClC,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,2BAA2B,GAAG,GAAG,CAAC,GAAG,CAAC,oCAAoC,CAAC;KACnF,OAAO,CAAC,yBAAyB,CAAC;KAClC,QAAQ,EAAE,CAAC;AAChB,MAAM,CAAC,MAAM,8BAA8B,GAAG,8BAA8B,CAAC;AAC7E,MAAM,CAAC,MAAM,uBAAuB,GAAG,+FAA+F;IAClI,+HAA+H;IAC/H,4EAA4E,CAAC;AACjF,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAC;AAC3C,MAAM,CAAC,MAAM,YAAY,GAAG,WAAW,CAAC;AAExC,MAAM,gBAAgB,GAAG,4CAA4C,CAAC;AACtE,MAAM,CAAC,MAAM,qBAAqB,GAAG;IACjC,IAAI,EAAE,gBAAgB,GAAG,aAAa;CAChC,CAAC"}
@@ -0,0 +1 @@
1
+ export declare const UNKNOWN_UNICODE_CHAR = "\uFFFD";
package/dist/consts.js ADDED
@@ -0,0 +1,2 @@
1
+ export const UNKNOWN_UNICODE_CHAR = "\ufffd";
2
+ //# sourceMappingURL=consts.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"consts.js","sourceRoot":"","sources":["../src/consts.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,oBAAoB,GAAG,QAAQ,CAAC"}
@@ -2,7 +2,7 @@
2
2
  import { EventRelay } from "lifecycle-utils";
3
3
  import { ChatWrapper } from "../../ChatWrapper.js";
4
4
  import { LlamaContextSequence } from "../LlamaContext/LlamaContext.js";
5
- import { ChatHistoryItem, ChatModelFunctions, Token } from "../../types.js";
5
+ import { ChatHistoryItem, ChatModelFunctions, LLamaContextualRepeatPenalty, Token } from "../../types.js";
6
6
  import { GbnfJsonSchemaToType } from "../../utils/gbnfJson/types.js";
7
7
  import { LlamaGrammar } from "../LlamaGrammar.js";
8
8
  import { EvaluationPriority } from "../LlamaContext/types.js";
@@ -61,7 +61,7 @@ export type LLamaChatGenerateResponseOptions<Functions extends ChatModelFunction
61
61
  * Disabled by default.
62
62
  */
63
63
  trimWhitespaceSuffix?: boolean;
64
- repeatPenalty?: false | LLamaChatRepeatPenalty;
64
+ repeatPenalty?: false | LLamaContextualRepeatPenalty;
65
65
  /**
66
66
  * See the parameter `evaluationPriority` on the `LlamaContextSequence.evaluate()` function for more information.
67
67
  */
@@ -119,37 +119,6 @@ export type LLamaChatContextShiftOptions = {
119
119
  */
120
120
  lastEvaluationMetadata?: object | undefined | null;
121
121
  };
122
- export type LLamaChatRepeatPenalty = {
123
- /**
124
- * Number of recent tokens generated by the model to apply penalties to repetition of.
125
- * Defaults to `64`.
126
- */
127
- lastTokens?: number;
128
- punishTokensFilter?: (tokens: Token[]) => Token[];
129
- /**
130
- * Penalize new line tokens.
131
- * Enabled by default.
132
- */
133
- penalizeNewLine?: boolean;
134
- /**
135
- * The relative amount to lower the probability of the tokens in `punishTokens` by
136
- * Defaults to `1.1`.
137
- * Set to `1` to disable.
138
- */
139
- penalty?: number;
140
- /**
141
- * For n time a token is in the `punishTokens` array, lower its probability by `n * frequencyPenalty`
142
- * Disabled by default (`0`).
143
- * Set to a value between `0` and `1` to enable.
144
- */
145
- frequencyPenalty?: number;
146
- /**
147
- * Lower the probability of all the tokens in the `punishTokens` array by `presencePenalty`
148
- * Disabled by default (`0`).
149
- * Set to a value between `0` and `1` to enable.
150
- */
151
- presencePenalty?: number;
152
- };
153
122
  export declare class LlamaChat {
154
123
  readonly onDispose: EventRelay<void>;
155
124
  constructor({ contextSequence, chatWrapper, autoDisposeSequence }: LlamaChatOptions);
@@ -2,9 +2,10 @@ import { DisposeAggregator, DisposedError, EventRelay } from "lifecycle-utils";
2
2
  import { resolveChatWrapper } from "../../utils/resolveChatWrapper.js";
3
3
  import { removeNullFields } from "../../utils/removeNullFields.js";
4
4
  import { LlamaGrammarEvaluationState } from "../LlamaGrammarEvaluationState.js";
5
- import { AbortError } from "../../AbortError.js";
6
5
  import { StopGenerationDetector } from "../../utils/StopGenerationDetector.js";
7
6
  import { TokenStreamRegulator } from "../../utils/TokenStreamRegulator.js";
7
+ import { UNKNOWN_UNICODE_CHAR } from "../../consts.js";
8
+ import { getQueuedTokensBeforeStopTrigger } from "../../utils/getQueuedTokensBeforeStopTrigger.js";
8
9
  import { eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy } from "./utils/contextShiftStrategies/eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy.js";
9
10
  import { FunctionCallGrammar, LlamaFunctionCallValidationError } from "./utils/FunctionCallGrammar.js";
10
11
  const defaultContextShiftOptions = {
@@ -12,7 +13,6 @@ const defaultContextShiftOptions = {
12
13
  strategy: "eraseFirstResponseAndKeepFirstSystem",
13
14
  lastEvaluationMetadata: null
14
15
  };
15
- const UNKNOWN_UNICODE_CHAR = "\ufffd";
16
16
  export class LlamaChat {
17
17
  /** @internal */ _chatWrapper;
18
18
  /** @internal */ _disposeAggregator = new DisposeAggregator();
@@ -68,7 +68,7 @@ export class LlamaChat {
68
68
  if (grammar != null && functionsEnabled)
69
69
  throw new Error("Using both grammar and functions is not supported yet");
70
70
  if (signal?.aborted)
71
- throw new AbortError();
71
+ throw signal.reason;
72
72
  if (this._sequence == null)
73
73
  throw new DisposedError();
74
74
  let resolvedHistory = this._sequence.isLoadedToMemory
@@ -122,7 +122,7 @@ export class LlamaChat {
122
122
  let lastHistoryCompressionMetadata = resolvedContextShift.lastEvaluationMetadata;
123
123
  const ensureNotAborted = () => {
124
124
  if (signal?.aborted)
125
- throw new AbortError();
125
+ throw signal.reason;
126
126
  if (this._sequence == null)
127
127
  throw new DisposedError();
128
128
  };
@@ -132,7 +132,7 @@ export class LlamaChat {
132
132
  let punishTokens = res.slice(-repeatPenaltyLastTokens);
133
133
  if (punishTokensFilter != null)
134
134
  punishTokens = punishTokensFilter(punishTokens);
135
- if (!penalizeNewLine) {
135
+ if (penalizeNewLine == null || !penalizeNewLine) {
136
136
  const nlToken = model.tokens.nl;
137
137
  if (nlToken != null)
138
138
  punishTokens = punishTokens.filter(token => token !== nlToken);
@@ -537,27 +537,6 @@ async function compressHistoryToFitContextSize({ history, contextShiftSize, cont
537
537
  metadata
538
538
  };
539
539
  }
540
- function getQueuedTokensBeforeStopTrigger(triggeredStops, partiallyFreeTokens, tokenizer) {
541
- if (partiallyFreeTokens.tokens.length === 0 && partiallyFreeTokens.text.length === 0)
542
- return [];
543
- else if (partiallyFreeTokens.tokens.length !== 0 && partiallyFreeTokens.text.length === 0)
544
- return partiallyFreeTokens.tokens;
545
- else if (partiallyFreeTokens.tokens.length === 0 && partiallyFreeTokens.text.length !== 0)
546
- return tokenizer(partiallyFreeTokens.text);
547
- const triggerThatStartsWithStringIndex = triggeredStops.findIndex((trigger) => trigger.stopTrigger.length > 0 && typeof trigger.stopTrigger[0] === "string");
548
- const triggerThatStartsWithTokenIndex = triggeredStops.findIndex((trigger) => trigger.stopTrigger.length > 0 && typeof trigger.stopTrigger[0] !== "string");
549
- if (triggerThatStartsWithTokenIndex > 0 && triggerThatStartsWithStringIndex < 0)
550
- return partiallyFreeTokens.tokens;
551
- else if (triggerThatStartsWithStringIndex > 0 && triggerThatStartsWithTokenIndex < 0)
552
- return tokenizer(partiallyFreeTokens.text);
553
- const stringTokens = tokenizer(partiallyFreeTokens.text);
554
- if (stringTokens.length === partiallyFreeTokens.tokens.length &&
555
- stringTokens.every((value, index) => value === partiallyFreeTokens.tokens[index]))
556
- return stringTokens;
557
- else if (triggerThatStartsWithStringIndex < triggerThatStartsWithTokenIndex)
558
- return stringTokens;
559
- return partiallyFreeTokens.tokens;
560
- }
561
540
  function getLastTextModelResponseFromChatHistory(chatHistory) {
562
541
  if (chatHistory.length === 0 || chatHistory[chatHistory.length - 1].type !== "model")
563
542
  return "";
@@ -673,9 +652,9 @@ async function getContextWindow({ resolvedHistory, resolvedContextShift, lastHis
673
652
  disengageInitiallyEngagedFunctionCall: functionCall?.disengageInitiallyEngaged ?? []
674
653
  };
675
654
  }
676
- const contextShiftSize = resolvedContextShift.size instanceof Function
655
+ const contextShiftSize = Math.min(context.contextSize, Math.max(1, Math.floor(resolvedContextShift.size instanceof Function
677
656
  ? await resolvedContextShift.size(sequence)
678
- : resolvedContextShift.size;
657
+ : resolvedContextShift.size)));
679
658
  const { compressedHistory, metadata } = await compressHistoryToFitContextSize({
680
659
  history: resolvedHistory,
681
660
  contextShiftSize: Math.max(contextShiftSize, minFreeContextTokens) + pendingTokensCount,