node-llama-cpp 3.0.0-beta.10 → 3.0.0-beta.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69) hide show
  1. package/dist/bindings/AddonTypes.d.ts +3 -0
  2. package/dist/bindings/getLlama.d.ts +17 -0
  3. package/dist/bindings/getLlama.js +4 -1
  4. package/dist/bindings/getLlama.js.map +1 -1
  5. package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.d.ts +26 -0
  6. package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js +43 -0
  7. package/dist/bindings/utils/resolveChatWrapperBasedOnWrapperTypeName.js.map +1 -0
  8. package/dist/cli/cli.js +4 -0
  9. package/dist/cli/cli.js.map +1 -1
  10. package/dist/cli/commands/ChatCommand.d.ts +2 -2
  11. package/dist/cli/commands/ChatCommand.js +3 -39
  12. package/dist/cli/commands/ChatCommand.js.map +1 -1
  13. package/dist/cli/commands/CompleteCommand.d.ts +25 -0
  14. package/dist/cli/commands/CompleteCommand.js +278 -0
  15. package/dist/cli/commands/CompleteCommand.js.map +1 -0
  16. package/dist/cli/commands/InfillCommand.d.ts +27 -0
  17. package/dist/cli/commands/InfillCommand.js +316 -0
  18. package/dist/cli/commands/InfillCommand.js.map +1 -0
  19. package/dist/consts.d.ts +1 -0
  20. package/dist/consts.js +2 -0
  21. package/dist/consts.js.map +1 -0
  22. package/dist/evaluator/LlamaChat/LlamaChat.d.ts +2 -33
  23. package/dist/evaluator/LlamaChat/LlamaChat.js +7 -28
  24. package/dist/evaluator/LlamaChat/LlamaChat.js.map +1 -1
  25. package/dist/evaluator/LlamaChatSession/LlamaChatSession.js +1 -1
  26. package/dist/evaluator/LlamaChatSession/LlamaChatSession.js.map +1 -1
  27. package/dist/evaluator/LlamaCompletion.d.ts +148 -0
  28. package/dist/evaluator/LlamaCompletion.js +402 -0
  29. package/dist/evaluator/LlamaCompletion.js.map +1 -0
  30. package/dist/evaluator/LlamaContext/LlamaContext.js +6 -2
  31. package/dist/evaluator/LlamaContext/LlamaContext.js.map +1 -1
  32. package/dist/evaluator/LlamaModel.d.ts +10 -1
  33. package/dist/evaluator/LlamaModel.js +33 -3
  34. package/dist/evaluator/LlamaModel.js.map +1 -1
  35. package/dist/index.d.ts +6 -4
  36. package/dist/index.js +4 -2
  37. package/dist/index.js.map +1 -1
  38. package/dist/types.d.ts +31 -0
  39. package/dist/utils/UnsupportedError.d.ts +2 -0
  40. package/dist/utils/UnsupportedError.js +7 -0
  41. package/dist/utils/UnsupportedError.js.map +1 -0
  42. package/dist/utils/getQueuedTokensBeforeStopTrigger.d.ts +6 -0
  43. package/dist/utils/getQueuedTokensBeforeStopTrigger.js +22 -0
  44. package/dist/utils/getQueuedTokensBeforeStopTrigger.js.map +1 -0
  45. package/llama/addon.cpp +63 -9
  46. package/llama/binariesGithubRelease.json +1 -1
  47. package/llama/gitRelease.bundle +0 -0
  48. package/llama/llama.cpp.info.json +1 -1
  49. package/llamaBins/linux-arm64/.buildMetadata.json +1 -1
  50. package/llamaBins/linux-arm64/llama-addon.node +0 -0
  51. package/llamaBins/linux-armv7l/.buildMetadata.json +1 -1
  52. package/llamaBins/linux-armv7l/llama-addon.node +0 -0
  53. package/llamaBins/linux-x64/.buildMetadata.json +1 -1
  54. package/llamaBins/linux-x64/llama-addon.node +0 -0
  55. package/llamaBins/linux-x64-cuda/.buildMetadata.json +1 -1
  56. package/llamaBins/linux-x64-cuda/llama-addon.node +0 -0
  57. package/llamaBins/mac-arm64-metal/.buildMetadata.json +1 -1
  58. package/llamaBins/mac-arm64-metal/ggml-metal.metal +378 -6
  59. package/llamaBins/mac-arm64-metal/llama-addon.node +0 -0
  60. package/llamaBins/mac-x64/.buildMetadata.json +1 -1
  61. package/llamaBins/mac-x64/llama-addon.node +0 -0
  62. package/llamaBins/win-x64/.buildMetadata.json +1 -1
  63. package/llamaBins/win-x64/llama-addon.node +0 -0
  64. package/llamaBins/win-x64-cuda/.buildMetadata.json +1 -1
  65. package/llamaBins/win-x64-cuda/llama-addon.node +0 -0
  66. package/package.json +2 -2
  67. package/dist/AbortError.d.ts +0 -2
  68. package/dist/AbortError.js +0 -7
  69. package/dist/AbortError.js.map +0 -1
@@ -0,0 +1,278 @@
1
+ import * as readline from "readline";
2
+ import process from "process";
3
+ import path from "path";
4
+ import chalk from "chalk";
5
+ import fs from "fs-extra";
6
+ import withStatusLogs from "../../utils/withStatusLogs.js";
7
+ import { getLlama } from "../../bindings/getLlama.js";
8
+ import { LlamaModel } from "../../evaluator/LlamaModel.js";
9
+ import { LlamaContext } from "../../evaluator/LlamaContext/LlamaContext.js";
10
+ import { LlamaLogLevel } from "../../bindings/types.js";
11
+ import { LlamaCompletion } from "../../evaluator/LlamaCompletion.js";
12
+ export const CompleteCommand = {
13
+ command: "complete",
14
+ describe: "Generate a completion for a given text",
15
+ builder(yargs) {
16
+ return yargs
17
+ .option("model", {
18
+ alias: "m",
19
+ type: "string",
20
+ demandOption: true,
21
+ description: "Llama model file to use for the chat",
22
+ group: "Required:"
23
+ })
24
+ .option("systemInfo", {
25
+ alias: "i",
26
+ type: "boolean",
27
+ default: false,
28
+ description: "Print llama.cpp system info",
29
+ group: "Optional:"
30
+ })
31
+ .option("text", {
32
+ type: "string",
33
+ description: "First text to automatically start generating completion for",
34
+ group: "Optional:"
35
+ })
36
+ .option("textFile", {
37
+ type: "string",
38
+ description: "Path to a file to load text from and use as the first text to automatically start generating completion for",
39
+ group: "Optional:"
40
+ })
41
+ .option("contextSize", {
42
+ alias: "c",
43
+ type: "number",
44
+ default: 1024 * 4,
45
+ description: "Context size to use for the model context",
46
+ group: "Optional:"
47
+ })
48
+ .option("batchSize", {
49
+ alias: "b",
50
+ type: "number",
51
+ description: "Batch size to use for the model context. The default value is the context size",
52
+ group: "Optional:"
53
+ })
54
+ .option("threads", {
55
+ type: "number",
56
+ default: 6,
57
+ description: "Number of threads to use for the evaluation of tokens",
58
+ group: "Optional:"
59
+ })
60
+ .option("temperature", {
61
+ alias: "t",
62
+ type: "number",
63
+ default: 0,
64
+ description: "Temperature is a hyperparameter that controls the randomness of the generated text. It affects the probability distribution of the model's output tokens. A higher temperature (e.g., 1.5) makes the output more random and creative, while a lower temperature (e.g., 0.5) makes the output more focused, deterministic, and conservative. The suggested temperature is 0.8, which provides a balance between randomness and determinism. At the extreme, a temperature of 0 will always pick the most likely next token, leading to identical outputs in each run. Set to `0` to disable.",
65
+ group: "Optional:"
66
+ })
67
+ .option("minP", {
68
+ alias: "mp",
69
+ type: "number",
70
+ default: 0,
71
+ description: "From the next token candidates, discard the percentage of tokens with the lowest probability. For example, if set to `0.05`, 5% of the lowest probability tokens will be discarded. This is useful for generating more high-quality results when using a high temperature. Set to a value between `0` and `1` to enable. Only relevant when `temperature` is set to a value greater than `0`.",
72
+ group: "Optional:"
73
+ })
74
+ .option("topK", {
75
+ alias: "k",
76
+ type: "number",
77
+ default: 40,
78
+ description: "Limits the model to consider only the K most likely next tokens for sampling at each step of sequence generation. An integer number between `1` and the size of the vocabulary. Set to `0` to disable (which uses the full vocabulary). Only relevant when `temperature` is set to a value greater than 0.",
79
+ group: "Optional:"
80
+ })
81
+ .option("topP", {
82
+ alias: "p",
83
+ type: "number",
84
+ default: 0.95,
85
+ description: "Dynamically selects the smallest set of tokens whose cumulative probability exceeds the threshold P, and samples the next token only from this set. A float number between `0` and `1`. Set to `1` to disable. Only relevant when `temperature` is set to a value greater than `0`.",
86
+ group: "Optional:"
87
+ })
88
+ .option("gpuLayers", {
89
+ alias: "gl",
90
+ type: "number",
91
+ description: "number of layers to store in VRAM",
92
+ group: "Optional:"
93
+ })
94
+ .option("repeatPenalty", {
95
+ alias: "rp",
96
+ type: "number",
97
+ default: 1.1,
98
+ description: "Prevent the model from repeating the same token too much. Set to `1` to disable.",
99
+ group: "Optional:"
100
+ })
101
+ .option("lastTokensRepeatPenalty", {
102
+ alias: "rpn",
103
+ type: "number",
104
+ default: 64,
105
+ description: "Number of recent tokens generated by the model to apply penalties to repetition of",
106
+ group: "Optional:"
107
+ })
108
+ .option("penalizeRepeatingNewLine", {
109
+ alias: "rpnl",
110
+ type: "boolean",
111
+ default: true,
112
+ description: "Penalize new line tokens. set \"--no-penalizeRepeatingNewLine\" or \"--no-rpnl\" to disable",
113
+ group: "Optional:"
114
+ })
115
+ .option("repeatFrequencyPenalty", {
116
+ alias: "rfp",
117
+ type: "number",
118
+ description: "For n time a token is in the `punishTokens` array, lower its probability by `n * repeatFrequencyPenalty`. Set to a value between `0` and `1` to enable.",
119
+ group: "Optional:"
120
+ })
121
+ .option("repeatPresencePenalty", {
122
+ alias: "rpp",
123
+ type: "number",
124
+ description: "Lower the probability of all the tokens in the `punishTokens` array by `repeatPresencePenalty`. Set to a value between `0` and `1` to enable.",
125
+ group: "Optional:"
126
+ })
127
+ .option("maxTokens", {
128
+ alias: "mt",
129
+ type: "number",
130
+ default: 0,
131
+ description: "Maximum number of tokens to generate in responses. Set to `0` to disable. Set to `-1` to set to the context size",
132
+ group: "Optional:"
133
+ })
134
+ .option("noInfoLog", {
135
+ alias: "nl",
136
+ type: "boolean",
137
+ default: false,
138
+ description: "Disable llama.cpp info logs",
139
+ group: "Optional:"
140
+ })
141
+ .option("printTimings", {
142
+ alias: "pt",
143
+ type: "boolean",
144
+ default: false,
145
+ description: "Print llama.cpp timings after each response",
146
+ group: "Optional:"
147
+ });
148
+ },
149
+ async handler({ model, systemInfo, text, textFile, contextSize, batchSize, threads, temperature, minP, topK, topP, gpuLayers, repeatPenalty, lastTokensRepeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens, noInfoLog, printTimings }) {
150
+ try {
151
+ await RunCompletion({
152
+ model, systemInfo, text, textFile, contextSize, batchSize,
153
+ threads, temperature, minP, topK, topP, gpuLayers, lastTokensRepeatPenalty,
154
+ repeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens,
155
+ noInfoLog, printTimings
156
+ });
157
+ }
158
+ catch (err) {
159
+ console.error(err);
160
+ process.exit(1);
161
+ }
162
+ }
163
+ };
164
+ async function RunCompletion({ model: modelArg, systemInfo, text, textFile, contextSize, batchSize, threads, temperature, minP, topK, topP, gpuLayers, lastTokensRepeatPenalty, repeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens, noInfoLog, printTimings }) {
165
+ if (noInfoLog)
166
+ console.info(`${chalk.yellow("Log level:")} warn`);
167
+ const llama = await getLlama("lastBuild", {
168
+ logLevel: noInfoLog
169
+ ? LlamaLogLevel.warn
170
+ : LlamaLogLevel.debug
171
+ });
172
+ const logBatchSize = batchSize != null;
173
+ if (systemInfo)
174
+ console.log(llama.systemInfo);
175
+ if (textFile != null && textFile !== "") {
176
+ if (text != null && text !== "")
177
+ console.warn(chalk.yellow("Both `text` and `textFile` were specified. `textFile` will be used."));
178
+ text = await fs.readFile(path.resolve(process.cwd(), textFile), "utf8");
179
+ }
180
+ if (batchSize == null)
181
+ batchSize = contextSize;
182
+ else if (batchSize > contextSize) {
183
+ console.warn(chalk.yellow("Batch size is greater than the context size. Batch size will be set to the context size."));
184
+ batchSize = contextSize;
185
+ }
186
+ let initialText = text ?? null;
187
+ const model = await withStatusLogs({
188
+ loading: chalk.blue("Loading model"),
189
+ success: chalk.blue("Model loaded"),
190
+ fail: chalk.blue("Failed to load model")
191
+ }, async () => new LlamaModel({
192
+ llama,
193
+ modelPath: path.resolve(process.cwd(), modelArg),
194
+ gpuLayers: gpuLayers != null ? gpuLayers : undefined
195
+ }));
196
+ const context = await withStatusLogs({
197
+ loading: chalk.blue("Creating context"),
198
+ success: chalk.blue("Context created"),
199
+ fail: chalk.blue("Failed to create context")
200
+ }, async () => new LlamaContext({
201
+ model,
202
+ contextSize,
203
+ batchSize,
204
+ threads
205
+ }));
206
+ const completion = new LlamaCompletion({
207
+ contextSequence: context.getSequence()
208
+ });
209
+ await new Promise((accept) => setTimeout(accept, 0)); // wait for logs to finish printing
210
+ console.info(`${chalk.yellow("Context size:")} ${context.contextSize}`);
211
+ if (logBatchSize)
212
+ console.info(`${chalk.yellow("Batch size:")} ${context.batchSize}`);
213
+ console.info(`${chalk.yellow("Train context size:")} ${model.trainContextSize}`);
214
+ console.info(`${chalk.yellow("Model type:")} ${model.typeDescription}`);
215
+ console.info(`${chalk.yellow("Repeat penalty:")} ${repeatPenalty} (apply to last ${lastTokensRepeatPenalty} tokens)`);
216
+ if (repeatFrequencyPenalty != null)
217
+ console.info(`${chalk.yellow("Repeat frequency penalty:")} ${repeatFrequencyPenalty}`);
218
+ if (repeatPresencePenalty != null)
219
+ console.info(`${chalk.yellow("Repeat presence penalty:")} ${repeatPresencePenalty}`);
220
+ if (!penalizeRepeatingNewLine)
221
+ console.info(`${chalk.yellow("Penalize repeating new line:")} disabled`);
222
+ // this is for ora to not interfere with readline
223
+ await new Promise(resolve => setTimeout(resolve, 1));
224
+ const replHistory = [];
225
+ async function getPrompt() {
226
+ const rl = readline.createInterface({
227
+ input: process.stdin,
228
+ output: process.stdout,
229
+ history: replHistory.slice()
230
+ });
231
+ const res = await new Promise((accept) => rl.question(chalk.yellow("> "), accept));
232
+ rl.close();
233
+ return res;
234
+ }
235
+ // eslint-disable-next-line no-constant-condition
236
+ while (true) {
237
+ const input = initialText != null
238
+ ? initialText
239
+ : await getPrompt();
240
+ if (initialText != null) {
241
+ console.log(chalk.green("> ") + initialText);
242
+ initialText = null;
243
+ }
244
+ else
245
+ await replHistory.push(input);
246
+ if (input === ".exit")
247
+ break;
248
+ process.stdout.write(chalk.yellow("Completion: "));
249
+ const [startColor, endColor] = chalk.blue("MIDDLE").split("MIDDLE");
250
+ process.stdout.write(startColor);
251
+ await completion.generateCompletion(input, {
252
+ temperature,
253
+ minP,
254
+ topK,
255
+ topP,
256
+ repeatPenalty: {
257
+ penalty: repeatPenalty,
258
+ frequencyPenalty: repeatFrequencyPenalty != null ? repeatFrequencyPenalty : undefined,
259
+ presencePenalty: repeatPresencePenalty != null ? repeatPresencePenalty : undefined,
260
+ penalizeNewLine: penalizeRepeatingNewLine,
261
+ lastTokens: lastTokensRepeatPenalty
262
+ },
263
+ maxTokens: maxTokens === -1
264
+ ? context.contextSize
265
+ : maxTokens <= 0
266
+ ? undefined
267
+ : maxTokens,
268
+ onToken(chunk) {
269
+ process.stdout.write(model.detokenize(chunk));
270
+ }
271
+ });
272
+ process.stdout.write(endColor);
273
+ console.log();
274
+ if (printTimings)
275
+ await context.printTimings();
276
+ }
277
+ }
278
+ //# sourceMappingURL=CompleteCommand.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"CompleteCommand.js","sourceRoot":"","sources":["../../../src/cli/commands/CompleteCommand.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,QAAQ,MAAM,UAAU,CAAC;AACrC,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,MAAM,UAAU,CAAC;AAC1B,OAAO,cAAc,MAAM,+BAA+B,CAAC;AAC3D,OAAO,EAAC,QAAQ,EAAC,MAAM,4BAA4B,CAAC;AACpD,OAAO,EAAC,UAAU,EAAC,MAAM,+BAA+B,CAAC;AACzD,OAAO,EAAC,YAAY,EAAC,MAAM,8CAA8C,CAAC;AAC1E,OAAO,EAAC,aAAa,EAAC,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAC,eAAe,EAAC,MAAM,oCAAoC,CAAC;AAyBnE,MAAM,CAAC,MAAM,eAAe,GAA2C;IACnE,OAAO,EAAE,UAAU;IACnB,QAAQ,EAAE,wCAAwC;IAClD,OAAO,CAAC,KAAK;QACT,OAAO,KAAK;aACP,MAAM,CAAC,OAAO,EAAE;YACb,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,YAAY,EAAE,IAAI;YAClB,WAAW,EAAE,sCAAsC;YACnD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6BAA6B;YAC1C,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,6DAA6D;YAC1E,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,UAAU,EAAE;YAChB,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,6GAA6G;YAC1H,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,aAAa,EAAE;YACnB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,IAAI,GAAG,CAAC;YACjB,WAAW,EAAE,2CAA2C;YACxD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,gFAAgF;YAC7F,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,SAAS,EAAE;YACf,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,uDAAuD;YACpE,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,aAAa,EAAE;YACnB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,6jBAA6jB;YAC1kB,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,+XAA+X;YAC5Y,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,4SAA4S;YACzT,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,GAAG;YACV
,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,IAAI;YACb,WAAW,EAAE,qRAAqR;YAClS,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,mCAAmC;YAChD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,eAAe,EAAE;YACrB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,GAAG;YACZ,WAAW,EAAE,kFAAkF;YAC/F,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,yBAAyB,EAAE;YAC/B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,oFAAoF;YACjG,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,0BAA0B,EAAE;YAChC,KAAK,EAAE,MAAM;YACb,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,IAAI;YACb,WAAW,EAAE,6FAA6F;YAC1G,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,wBAAwB,EAAE;YAC9B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,yJAAyJ;YACtK,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,uBAAuB,EAAE;YAC7B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,+IAA+I;YAC5J,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,kHAAkH;YAC/H,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6BAA6B;YAC1C,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,cAAc,EAAE;YACpB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6CAA6C;YAC1D,KAAK,EAAE,WAAW;SACrB,CAAC,CAAC;IACX,CAAC;IACD,KAAK,CAAC,OAAO,CAAC,EACV,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,SAAS,EACzD,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAChC,IAAI,EAAE,SAAS,EAAE,aAAa,EAAE,uBAAuB,EAAE,wBAAwB,EACjF,sBAAsB,EAAE,qBAAqB,EAAE,SAAS,EACxD,SAAS,EAAE,YAAY,EAC1B;QACG,IAAI;YACA,MAAM,aAAa,CAAC;gBAChB,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,SAAS;gBACzD,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EAAE,uBAAuB;gBAC1E,aAAa,EAAE,wBAAwB,EAAE,sBAAsB,EAAE,qBAAqB,EAAE,SAAS;gBACjG,SAAS,EAAE,YAAY;aAC1B,CAAC,CAAC;SACN;QAAC,OAAO,GAAG,EAAE;YACV,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACnB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnB;IACL,CAAC;CACJ,CAAC;AAGF,KAAK,UAAU,aAAa,CAAC,EACzB,KAAK,EAAE,QAAQ,EAAE
,UAAU,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,SAAS,EACnE,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EACjD,uBAAuB,EAAE,aAAa,EAAE,wBAAwB,EAAE,sBAAsB,EAAE,qBAAqB,EAC/G,SAAS,EAAE,SAAS,EAAE,YAAY,EACpB;IACd,IAAI,SAAS;QACT,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;IAEvD,MAAM,KAAK,GAAG,MAAM,QAAQ,CAAC,WAAW,EAAE;QACtC,QAAQ,EAAE,SAAS;YACf,CAAC,CAAC,aAAa,CAAC,IAAI;YACpB,CAAC,CAAC,aAAa,CAAC,KAAK;KAC5B,CAAC,CAAC;IACH,MAAM,YAAY,GAAG,SAAS,IAAI,IAAI,CAAC;IAEvC,IAAI,UAAU;QACV,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IAElC,IAAI,QAAQ,IAAI,IAAI,IAAI,QAAQ,KAAK,EAAE,EAAE;QACrC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,qEAAqE,CAAC,CAAC,CAAC;QAEtG,IAAI,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,QAAQ,CAAC,EAAE,MAAM,CAAC,CAAC;KAC3E;IAED,IAAI,SAAS,IAAI,IAAI;QACjB,SAAS,GAAG,WAAW,CAAC;SACvB,IAAI,SAAS,GAAG,WAAW,EAAE;QAC9B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,0FAA0F,CAAC,CAAC,CAAC;QACvH,SAAS,GAAG,WAAW,CAAC;KAC3B;IAED,IAAI,WAAW,GAAG,IAAI,IAAI,IAAI,CAAC;IAC/B,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC;QAC/B,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC;QACpC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;QACnC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,sBAAsB,CAAC;KAC3C,EAAE,KAAK,IAAI,EAAE,CAAC,IAAI,UAAU,CAAC;QAC1B,KAAK;QACL,SAAS,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,QAAQ,CAAC;QAChD,SAAS,EAAE,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS;KACvD,CAAC,CAAC,CAAC;IACJ,MAAM,OAAO,GAAG,MAAM,cAAc,CAAC;QACjC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC;QACvC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;QACtC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,0BAA0B,CAAC;KAC/C,EAAE,KAAK,IAAI,EAAE,CAAC,IAAI,YAAY,CAAC;QAC5B,KAAK;QACL,WAAW;QACX,SAAS;QACT,OAAO;KACV,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,IAAI,eAAe,CAAC;QACnC,eAAe,EAAE,OAAO,CAAC,WAAW,EAAE;KACzC,CAAC,CAAC;IAEH,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,mCAAmC;IAEzF,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,OAAO,CAAC,WA
AW,EAAE,CAAC,CAAC;IAExE,IAAI,YAAY;QACZ,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;IAExE,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,qBAAqB,CAAC,IAAI,KAAK,CAAC,gBAAgB,EAAE,CAAC,CAAC;IACjF,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,eAAe,EAAE,CAAC,CAAC;IACxE,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,aAAa,mBAAmB,uBAAuB,UAAU,CAAC,CAAC;IAEtH,IAAI,sBAAsB,IAAI,IAAI;QAC9B,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,2BAA2B,CAAC,IAAI,sBAAsB,EAAE,CAAC,CAAC;IAE3F,IAAI,qBAAqB,IAAI,IAAI;QAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,IAAI,qBAAqB,EAAE,CAAC,CAAC;IAEzF,IAAI,CAAC,wBAAwB;QACzB,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,8BAA8B,CAAC,WAAW,CAAC,CAAC;IAE7E,iDAAiD;IACjD,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;IAErD,MAAM,WAAW,GAAa,EAAE,CAAC;IAEjC,KAAK,UAAU,SAAS;QACpB,MAAM,EAAE,GAAG,QAAQ,CAAC,eAAe,CAAC;YAChC,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE;SAC/B,CAAC,CAAC;QAEH,MAAM,GAAG,GAAW,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;QAC3F,EAAE,CAAC,KAAK,EAAE,CAAC;QAEX,OAAO,GAAG,CAAC;IACf,CAAC;IAED,iDAAiD;IACjD,OAAO,IAAI,EAAE;QACT,MAAM,KAAK,GAAG,WAAW,IAAI,IAAI;YAC7B,CAAC,CAAC,WAAW;YACb,CAAC,CAAC,MAAM,SAAS,EAAE,CAAC;QAExB,IAAI,WAAW,IAAI,IAAI,EAAE;YACrB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,CAAC;YAC7C,WAAW,GAAG,IAAI,CAAC;SACtB;;YACG,MAAM,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAElC,IAAI,KAAK,KAAK,OAAO;YACjB,MAAM;QAEV,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC;QAEnD,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAEpE,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;QACjC,MAAM,UAAU,CAAC,kBAAkB,CAAC,KAAK,EAAE;YACvC,WAAW;YACX,IAAI;YACJ,IAAI;YACJ,IAAI;YACJ,aAAa,EAAE;gBACX,OAAO,EAAE,aAAa;gBACtB,gBAAgB,EAAE,sBAAsB,IAAI,IAAI,
CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,SAAS;gBACrF,eAAe,EAAE,qBAAqB,IAAI,IAAI,CAAC,CAAC,CAAC,qBAAqB,CAAC,CAAC,CAAC,SAAS;gBAClF,eAAe,EAAE,wBAAwB;gBACzC,UAAU,EAAE,uBAAuB;aACtC;YACD,SAAS,EAAE,SAAS,KAAK,CAAC,CAAC;gBACvB,CAAC,CAAC,OAAO,CAAC,WAAW;gBACrB,CAAC,CAAC,SAAS,IAAI,CAAC;oBACZ,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,SAAS;YACnB,OAAO,CAAC,KAAK;gBACT,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;YAClD,CAAC;SACJ,CAAC,CAAC;QACH,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/B,OAAO,CAAC,GAAG,EAAE,CAAC;QAEd,IAAI,YAAY;YACZ,MAAM,OAAO,CAAC,YAAY,EAAE,CAAC;KACpC;AACL,CAAC"}
@@ -0,0 +1,27 @@
1
+ import { CommandModule } from "yargs";
2
+ type InfillCommand = {
3
+ model: string;
4
+ systemInfo: boolean;
5
+ prefix?: string;
6
+ prefixFile?: string;
7
+ suffix?: string;
8
+ suffixFile?: string;
9
+ contextSize: number;
10
+ batchSize?: number;
11
+ threads: number;
12
+ temperature: number;
13
+ minP: number;
14
+ topK: number;
15
+ topP: number;
16
+ gpuLayers?: number;
17
+ repeatPenalty: number;
18
+ lastTokensRepeatPenalty: number;
19
+ penalizeRepeatingNewLine: boolean;
20
+ repeatFrequencyPenalty?: number;
21
+ repeatPresencePenalty?: number;
22
+ maxTokens: number;
23
+ noInfoLog: boolean;
24
+ printTimings: boolean;
25
+ };
26
+ export declare const InfillCommand: CommandModule<object, InfillCommand>;
27
+ export {};
@@ -0,0 +1,316 @@
1
+ import * as readline from "readline";
2
+ import process from "process";
3
+ import path from "path";
4
+ import chalk from "chalk";
5
+ import fs from "fs-extra";
6
+ import withStatusLogs from "../../utils/withStatusLogs.js";
7
+ import { getLlama } from "../../bindings/getLlama.js";
8
+ import { LlamaModel } from "../../evaluator/LlamaModel.js";
9
+ import { LlamaContext } from "../../evaluator/LlamaContext/LlamaContext.js";
10
+ import { LlamaLogLevel } from "../../bindings/types.js";
11
+ import { LlamaCompletion } from "../../evaluator/LlamaCompletion.js";
12
+ export const InfillCommand = {
13
+ command: "infill",
14
+ describe: "Generate an infill completion for a given suffix and prefix texts",
15
+ builder(yargs) {
16
+ return yargs
17
+ .option("model", {
18
+ alias: "m",
19
+ type: "string",
20
+ demandOption: true,
21
+ description: "Llama model file to use for the chat",
22
+ group: "Required:"
23
+ })
24
+ .option("systemInfo", {
25
+ alias: "i",
26
+ type: "boolean",
27
+ default: false,
28
+ description: "Print llama.cpp system info",
29
+ group: "Optional:"
30
+ })
31
+ .option("prefix", {
32
+ type: "string",
33
+ description: "First prefix text to automatically load",
34
+ group: "Optional:"
35
+ })
36
+ .option("prefixFile", {
37
+ type: "string",
38
+ description: "Path to a file to load prefix text from automatically",
39
+ group: "Optional:"
40
+ })
41
+ .option("suffix", {
42
+ type: "string",
43
+ description: "First suffix text to automatically load. Requires `prefix` or `prefixFile` to be set",
44
+ group: "Optional:"
45
+ })
46
+ .option("suffixFile", {
47
+ type: "string",
48
+ description: "Path to a file to load suffix text from automatically. Requires `prefix` or `prefixFile` to be set",
49
+ group: "Optional:"
50
+ })
51
+ .option("contextSize", {
52
+ alias: "c",
53
+ type: "number",
54
+ default: 1024 * 4,
55
+ description: "Context size to use for the model context",
56
+ group: "Optional:"
57
+ })
58
+ .option("batchSize", {
59
+ alias: "b",
60
+ type: "number",
61
+ description: "Batch size to use for the model context. The default value is the context size",
62
+ group: "Optional:"
63
+ })
64
+ .option("threads", {
65
+ type: "number",
66
+ default: 6,
67
+ description: "Number of threads to use for the evaluation of tokens",
68
+ group: "Optional:"
69
+ })
70
+ .option("temperature", {
71
+ alias: "t",
72
+ type: "number",
73
+ default: 0,
74
+ description: "Temperature is a hyperparameter that controls the randomness of the generated text. It affects the probability distribution of the model's output tokens. A higher temperature (e.g., 1.5) makes the output more random and creative, while a lower temperature (e.g., 0.5) makes the output more focused, deterministic, and conservative. The suggested temperature is 0.8, which provides a balance between randomness and determinism. At the extreme, a temperature of 0 will always pick the most likely next token, leading to identical outputs in each run. Set to `0` to disable.",
75
+ group: "Optional:"
76
+ })
77
+ .option("minP", {
78
+ alias: "mp",
79
+ type: "number",
80
+ default: 0,
81
+ description: "From the next token candidates, discard the percentage of tokens with the lowest probability. For example, if set to `0.05`, 5% of the lowest probability tokens will be discarded. This is useful for generating more high-quality results when using a high temperature. Set to a value between `0` and `1` to enable. Only relevant when `temperature` is set to a value greater than `0`.",
82
+ group: "Optional:"
83
+ })
84
+ .option("topK", {
85
+ alias: "k",
86
+ type: "number",
87
+ default: 40,
88
+ description: "Limits the model to consider only the K most likely next tokens for sampling at each step of sequence generation. An integer number between `1` and the size of the vocabulary. Set to `0` to disable (which uses the full vocabulary). Only relevant when `temperature` is set to a value greater than 0.",
89
+ group: "Optional:"
90
+ })
91
+ .option("topP", {
92
+ alias: "p",
93
+ type: "number",
94
+ default: 0.95,
95
+ description: "Dynamically selects the smallest set of tokens whose cumulative probability exceeds the threshold P, and samples the next token only from this set. A float number between `0` and `1`. Set to `1` to disable. Only relevant when `temperature` is set to a value greater than `0`.",
96
+ group: "Optional:"
97
+ })
98
+ .option("gpuLayers", {
99
+ alias: "gl",
100
+ type: "number",
101
+ description: "number of layers to store in VRAM",
102
+ group: "Optional:"
103
+ })
104
+ .option("repeatPenalty", {
105
+ alias: "rp",
106
+ type: "number",
107
+ default: 1.1,
108
+ description: "Prevent the model from repeating the same token too much. Set to `1` to disable.",
109
+ group: "Optional:"
110
+ })
111
+ .option("lastTokensRepeatPenalty", {
112
+ alias: "rpn",
113
+ type: "number",
114
+ default: 64,
115
+ description: "Number of recent tokens generated by the model to apply penalties to repetition of",
116
+ group: "Optional:"
117
+ })
118
+ .option("penalizeRepeatingNewLine", {
119
+ alias: "rpnl",
120
+ type: "boolean",
121
+ default: true,
122
+ description: "Penalize new line tokens. set \"--no-penalizeRepeatingNewLine\" or \"--no-rpnl\" to disable",
123
+ group: "Optional:"
124
+ })
125
+ .option("repeatFrequencyPenalty", {
126
+ alias: "rfp",
127
+ type: "number",
128
+ description: "For n time a token is in the `punishTokens` array, lower its probability by `n * repeatFrequencyPenalty`. Set to a value between `0` and `1` to enable.",
129
+ group: "Optional:"
130
+ })
131
+ .option("repeatPresencePenalty", {
132
+ alias: "rpp",
133
+ type: "number",
134
+ description: "Lower the probability of all the tokens in the `punishTokens` array by `repeatPresencePenalty`. Set to a value between `0` and `1` to enable.",
135
+ group: "Optional:"
136
+ })
137
+ .option("maxTokens", {
138
+ alias: "mt",
139
+ type: "number",
140
+ default: 0,
141
+ description: "Maximum number of tokens to generate in responses. Set to `0` to disable. Set to `-1` to set to the context size",
142
+ group: "Optional:"
143
+ })
144
+ .option("noInfoLog", {
145
+ alias: "nl",
146
+ type: "boolean",
147
+ default: false,
148
+ description: "Disable llama.cpp info logs",
149
+ group: "Optional:"
150
+ })
151
+ .option("printTimings", {
152
+ alias: "pt",
153
+ type: "boolean",
154
+ default: false,
155
+ description: "Print llama.cpp timings after each response",
156
+ group: "Optional:"
157
+ });
158
+ },
159
+ async handler({ model, systemInfo, prefix, prefixFile, suffix, suffixFile, contextSize, batchSize, threads, temperature, minP, topK, topP, gpuLayers, repeatPenalty, lastTokensRepeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens, noInfoLog, printTimings }) {
160
+ try {
161
+ await RunInfill({
162
+ model, systemInfo, prefix, prefixFile, suffix, suffixFile, contextSize, batchSize,
163
+ threads, temperature, minP, topK, topP, gpuLayers, lastTokensRepeatPenalty,
164
+ repeatPenalty, penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens,
165
+ noInfoLog, printTimings
166
+ });
167
+ }
168
+ catch (err) {
169
+ console.error(err);
170
+ process.exit(1);
171
+ }
172
+ }
173
+ };
174
/**
 * Runs the interactive infill REPL.
 *
 * Loads the model and creates a context, then repeatedly prompts for a
 * prefix and a suffix on stdin and streams the generated middle ("infill")
 * text to stdout, until either input is ".exit".
 *
 * Fix over the previous revision: `await` was applied to
 * `Array.prototype.push(...)`, which returns a number — the spurious awaits
 * were removed.
 */
async function RunInfill({
    model: modelArg, systemInfo, prefix, prefixFile, suffix, suffixFile, contextSize, batchSize,
    threads, temperature, minP, topK, topP, gpuLayers, lastTokensRepeatPenalty, repeatPenalty,
    penalizeRepeatingNewLine, repeatFrequencyPenalty, repeatPresencePenalty, maxTokens, noInfoLog, printTimings
}) {
    if (noInfoLog)
        console.info(`${chalk.yellow("Log level:")} warn`);

    const llama = await getLlama("lastBuild", {
        logLevel: noInfoLog
            ? LlamaLogLevel.warn
            : LlamaLogLevel.debug
    });
    // remember whether --batchSize was passed explicitly; only log it in that case
    const logBatchSize = batchSize != null;

    if (systemInfo)
        console.log(llama.systemInfo);

    // a file argument takes precedence over the corresponding inline text argument
    if (prefixFile != null && prefixFile !== "") {
        if (prefix != null && prefix !== "")
            console.warn(chalk.yellow("Both `prefix` and `prefixFile` were specified. `prefixFile` will be used."));
        prefix = await fs.readFile(path.resolve(process.cwd(), prefixFile), "utf8");
    }

    if (suffixFile != null && suffixFile !== "") {
        if (suffix != null && suffix !== "")
            console.warn(chalk.yellow("Both `suffix` and `suffixFile` were specified. `suffixFile` will be used."));
        suffix = await fs.readFile(path.resolve(process.cwd(), suffixFile), "utf8");
    }

    // a suffix is only meaningful together with a prefix
    if (suffix != null && prefix == null) {
        console.warn(chalk.yellow("Suffix was specified but no prefix was specified. Suffix will be ignored."));
        suffix = undefined;
    }

    if (batchSize == null)
        batchSize = contextSize;
    else if (batchSize > contextSize) {
        console.warn(chalk.yellow("Batch size is greater than the context size. Batch size will be set to the context size."));
        batchSize = contextSize;
    }

    // prefix/suffix given on the command line are consumed by the first REPL iteration only
    let initialPrefix = prefix ?? null;
    let initialSuffix = suffix ?? null;

    const model = await withStatusLogs({
        loading: chalk.blue("Loading model"),
        success: chalk.blue("Model loaded"),
        fail: chalk.blue("Failed to load model")
    }, async () => new LlamaModel({
        llama,
        modelPath: path.resolve(process.cwd(), modelArg),
        gpuLayers: gpuLayers != null ? gpuLayers : undefined
    }));
    const context = await withStatusLogs({
        loading: chalk.blue("Creating context"),
        success: chalk.blue("Context created"),
        fail: chalk.blue("Failed to create context")
    }, async () => new LlamaContext({
        model,
        contextSize,
        batchSize,
        threads
    }));
    const completion = new LlamaCompletion({
        contextSequence: context.getSequence()
    });

    await new Promise((accept) => setTimeout(accept, 0)); // wait for logs to finish printing

    console.info(`${chalk.yellow("Context size:")} ${context.contextSize}`);

    if (logBatchSize)
        console.info(`${chalk.yellow("Batch size:")} ${context.batchSize}`);

    console.info(`${chalk.yellow("Train context size:")} ${model.trainContextSize}`);
    console.info(`${chalk.yellow("Model type:")} ${model.typeDescription}`);
    console.info(`${chalk.yellow("Repeat penalty:")} ${repeatPenalty} (apply to last ${lastTokensRepeatPenalty} tokens)`);

    if (repeatFrequencyPenalty != null)
        console.info(`${chalk.yellow("Repeat frequency penalty:")} ${repeatFrequencyPenalty}`);

    if (repeatPresencePenalty != null)
        console.info(`${chalk.yellow("Repeat presence penalty:")} ${repeatPresencePenalty}`);

    if (!penalizeRepeatingNewLine)
        console.info(`${chalk.yellow("Penalize repeating new line:")} disabled`);

    // this is for ora to not interfere with readline
    await new Promise(resolve => setTimeout(resolve, 1));

    if (!completion.infillSupported) {
        console.log(chalk.red("Infill is not supported for this model"));
        process.exit(1);
    }

    const replPrefixHistory = [];
    const replSuffixHistory = [];

    // Reads one line from stdin, with a separate readline history
    // for prefix inputs and for suffix inputs.
    async function getInput(name) {
        const rl = readline.createInterface({
            input: process.stdin,
            output: process.stdout,
            history: name === "Prefix"
                ? replPrefixHistory.slice()
                : replSuffixHistory.slice()
        });

        const res = await new Promise((accept) => rl.question(chalk.yellow(name + "> "), accept));
        rl.close();

        return res;
    }

    // eslint-disable-next-line no-constant-condition
    while (true) {
        const prefixInput = initialPrefix != null
            ? initialPrefix
            : await getInput("Prefix");

        if (initialPrefix != null) {
            console.log(chalk.green("Prefix> ") + initialPrefix);
            initialPrefix = null;
        }
        else
            replPrefixHistory.push(prefixInput);

        if (prefixInput === ".exit")
            break;

        const suffixInput = initialSuffix != null
            ? initialSuffix
            : await getInput("Suffix");

        if (initialSuffix != null) {
            console.log(chalk.green("Suffix> ") + initialSuffix);
            initialSuffix = null;
        }
        else
            replSuffixHistory.push(suffixInput);

        if (suffixInput === ".exit")
            break;

        process.stdout.write(chalk.yellow("Infill: "));

        // split a colored marker into its opening and closing ANSI escape sequences,
        // so the streamed tokens can be wrapped in the color
        const [startColor, endColor] = chalk.blue("MIDDLE").split("MIDDLE");

        process.stdout.write(startColor);
        await completion.generateInfillCompletion(prefixInput, suffixInput, {
            temperature,
            minP,
            topK,
            topP,
            repeatPenalty: {
                penalty: repeatPenalty,
                frequencyPenalty: repeatFrequencyPenalty != null ? repeatFrequencyPenalty : undefined,
                presencePenalty: repeatPresencePenalty != null ? repeatPresencePenalty : undefined,
                penalizeNewLine: penalizeRepeatingNewLine,
                lastTokens: lastTokensRepeatPenalty
            },
            // maxTokens CLI semantics: -1 → use the full context size, 0 → unlimited
            maxTokens: maxTokens === -1
                ? context.contextSize
                : maxTokens <= 0
                    ? undefined
                    : maxTokens,
            onToken(chunk) {
                process.stdout.write(model.detokenize(chunk));
            }
        });
        process.stdout.write(endColor);
        console.log();

        if (printTimings)
            await context.printTimings();
    }
}
316
+ //# sourceMappingURL=InfillCommand.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"InfillCommand.js","sourceRoot":"","sources":["../../../src/cli/commands/InfillCommand.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,QAAQ,MAAM,UAAU,CAAC;AACrC,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,MAAM,UAAU,CAAC;AAC1B,OAAO,cAAc,MAAM,+BAA+B,CAAC;AAC3D,OAAO,EAAC,QAAQ,EAAC,MAAM,4BAA4B,CAAC;AACpD,OAAO,EAAC,UAAU,EAAC,MAAM,+BAA+B,CAAC;AACzD,OAAO,EAAC,YAAY,EAAC,MAAM,8CAA8C,CAAC;AAC1E,OAAO,EAAC,aAAa,EAAC,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAC,eAAe,EAAC,MAAM,oCAAoC,CAAC;AA2BnE,MAAM,CAAC,MAAM,aAAa,GAAyC;IAC/D,OAAO,EAAE,QAAQ;IACjB,QAAQ,EAAE,mEAAmE;IAC7E,OAAO,CAAC,KAAK;QACT,OAAO,KAAK;aACP,MAAM,CAAC,OAAO,EAAE;YACb,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,YAAY,EAAE,IAAI;YAClB,WAAW,EAAE,sCAAsC;YACnD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6BAA6B;YAC1C,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,QAAQ,EAAE;YACd,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,yCAAyC;YACtD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,uDAAuD;YACpE,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,QAAQ,EAAE;YACd,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,sFAAsF;YACnG,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,YAAY,EAAE;YAClB,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,oGAAoG;YACjH,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,aAAa,EAAE;YACnB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,IAAI,GAAG,CAAC;YACjB,WAAW,EAAE,2CAA2C;YACxD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,gFAAgF;YAC7F,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,SAAS,EAAE;YACf,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,uDAAuD;YACpE,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,aAAa,EAAE;YACnB,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,6jBAA6jB;YAC1kB,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,+XAA+X;YAC5Y,KAAK,EAAE,WAAW;SACr
B,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,4SAA4S;YACzT,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,MAAM,EAAE;YACZ,KAAK,EAAE,GAAG;YACV,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,IAAI;YACb,WAAW,EAAE,qRAAqR;YAClS,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,mCAAmC;YAChD,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,eAAe,EAAE;YACrB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,GAAG;YACZ,WAAW,EAAE,kFAAkF;YAC/F,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,yBAAyB,EAAE;YAC/B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,oFAAoF;YACjG,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,0BAA0B,EAAE;YAChC,KAAK,EAAE,MAAM;YACb,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,IAAI;YACb,WAAW,EAAE,6FAA6F;YAC1G,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,wBAAwB,EAAE;YAC9B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,yJAAyJ;YACtK,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,uBAAuB,EAAE;YAC7B,KAAK,EAAE,KAAK;YACZ,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE,+IAA+I;YAC5J,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC;YACV,WAAW,EAAE,kHAAkH;YAC/H,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,WAAW,EAAE;YACjB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6BAA6B;YAC1C,KAAK,EAAE,WAAW;SACrB,CAAC;aACD,MAAM,CAAC,cAAc,EAAE;YACpB,KAAK,EAAE,IAAI;YACX,IAAI,EAAE,SAAS;YACf,OAAO,EAAE,KAAK;YACd,WAAW,EAAE,6CAA6C;YAC1D,KAAK,EAAE,WAAW;SACrB,CAAC,CAAC;IACX,CAAC;IACD,KAAK,CAAC,OAAO,CAAC,EACV,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS,EACjF,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAChC,IAAI,EAAE,SAAS,EAAE,aAAa,EAAE,uBAAuB,EAAE,wBAAwB,EACjF,sBAAsB,EAAE,qBAAqB,EAAE,SAAS,EACxD,SAAS,EAAE,YAAY,EAC1B;QACG,IAAI;YACA,MAAM,SAAS,CAAC;gBACZ,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS;gBACjF,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EAAE,uBAAuB;gBAC1E,aAAa,EAAE,wBAAwB,EAAE,sBAAsB,EAAE,qBA
AqB,EAAE,SAAS;gBACjG,SAAS,EAAE,YAAY;aAC1B,CAAC,CAAC;SACN;QAAC,OAAO,GAAG,EAAE;YACV,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACnB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnB;IACL,CAAC;CACJ,CAAC;AAGF,KAAK,UAAU,SAAS,CAAC,EACrB,KAAK,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,SAAS,EAC3F,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,SAAS,EACjD,uBAAuB,EAAE,aAAa,EAAE,wBAAwB,EAAE,sBAAsB,EAAE,qBAAqB,EAC/G,SAAS,EAAE,SAAS,EAAE,YAAY,EACtB;IACZ,IAAI,SAAS;QACT,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;IAEvD,MAAM,KAAK,GAAG,MAAM,QAAQ,CAAC,WAAW,EAAE;QACtC,QAAQ,EAAE,SAAS;YACf,CAAC,CAAC,aAAa,CAAC,IAAI;YACpB,CAAC,CAAC,aAAa,CAAC,KAAK;KAC5B,CAAC,CAAC;IACH,MAAM,YAAY,GAAG,SAAS,IAAI,IAAI,CAAC;IAEvC,IAAI,UAAU;QACV,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IAElC,IAAI,UAAU,IAAI,IAAI,IAAI,UAAU,KAAK,EAAE,EAAE;QACzC,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,KAAK,EAAE;YAC/B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,2EAA2E,CAAC,CAAC,CAAC;QAE5G,MAAM,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,UAAU,CAAC,EAAE,MAAM,CAAC,CAAC;KAC/E;IAED,IAAI,UAAU,IAAI,IAAI,IAAI,UAAU,KAAK,EAAE,EAAE;QACzC,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,KAAK,EAAE;YAC/B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,2EAA2E,CAAC,CAAC,CAAC;QAE5G,MAAM,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,UAAU,CAAC,EAAE,MAAM,CAAC,CAAC;KAC/E;IAED,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,IAAI,IAAI,EAAE;QAClC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,2EAA2E,CAAC,CAAC,CAAC;QACxG,MAAM,GAAG,SAAS,CAAC;KACtB;IAED,IAAI,SAAS,IAAI,IAAI;QACjB,SAAS,GAAG,WAAW,CAAC;SACvB,IAAI,SAAS,GAAG,WAAW,EAAE;QAC9B,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,0FAA0F,CAAC,CAAC,CAAC;QACvH,SAAS,GAAG,WAAW,CAAC;KAC3B;IAED,IAAI,aAAa,GAAG,MAAM,IAAI,IAAI,CAAC;IACnC,IAAI,aAAa,GAAG,MAAM,IAAI,IAAI,CAAC;IAEnC,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC;QAC/B,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC;QACpC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;QACnC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,sBAAsB,CAAC;KAC3C,EAAE,KAAK,IAAI,EAAE,CAAC,IAAI,UAAU,
CAAC;QAC1B,KAAK;QACL,SAAS,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,QAAQ,CAAC;QAChD,SAAS,EAAE,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS;KACvD,CAAC,CAAC,CAAC;IACJ,MAAM,OAAO,GAAG,MAAM,cAAc,CAAC;QACjC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC;QACvC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;QACtC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,0BAA0B,CAAC;KAC/C,EAAE,KAAK,IAAI,EAAE,CAAC,IAAI,YAAY,CAAC;QAC5B,KAAK;QACL,WAAW;QACX,SAAS;QACT,OAAO;KACV,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,IAAI,eAAe,CAAC;QACnC,eAAe,EAAE,OAAO,CAAC,WAAW,EAAE;KACzC,CAAC,CAAC;IAEH,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,mCAAmC;IAEzF,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IAExE,IAAI,YAAY;QACZ,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;IAExE,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,qBAAqB,CAAC,IAAI,KAAK,CAAC,gBAAgB,EAAE,CAAC,CAAC;IACjF,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,eAAe,EAAE,CAAC,CAAC;IACxE,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,aAAa,mBAAmB,uBAAuB,UAAU,CAAC,CAAC;IAEtH,IAAI,sBAAsB,IAAI,IAAI;QAC9B,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,2BAA2B,CAAC,IAAI,sBAAsB,EAAE,CAAC,CAAC;IAE3F,IAAI,qBAAqB,IAAI,IAAI;QAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,IAAI,qBAAqB,EAAE,CAAC,CAAC;IAEzF,IAAI,CAAC,wBAAwB;QACzB,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,8BAA8B,CAAC,WAAW,CAAC,CAAC;IAE7E,iDAAiD;IACjD,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;IAErD,IAAI,CAAC,UAAU,CAAC,eAAe,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,wCAAwC,CAAC,CAAC,CAAC;QACjE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACnB;IAED,MAAM,iBAAiB,GAAa,EAAE,CAAC;IACvC,MAAM,iBAAiB,GAAa,EAAE,CAAC;IAEvC,KAAK,UAAU,QAAQ,CAAC,IAAyB;QAC7C,MAAM,EAAE,GAAG,QAAQ,CAAC,eAAe,CAAC;YAChC,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,OAAO,EAAE,IAAI,KAAK,QAAQ;gBACtB,CAAC,CAAC,iBAAiB,
CAAC,KAAK,EAAE;gBAC3B,CAAC,CAAC,iBAAiB,CAAC,KAAK,EAAE;SAClC,CAAC,CAAC;QAEH,MAAM,GAAG,GAAW,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;QAClG,EAAE,CAAC,KAAK,EAAE,CAAC;QAEX,OAAO,GAAG,CAAC;IACf,CAAC;IAED,iDAAiD;IACjD,OAAO,IAAI,EAAE;QACT,MAAM,WAAW,GAAG,aAAa,IAAI,IAAI;YACrC,CAAC,CAAC,aAAa;YACf,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAE/B,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,CAAC;YACrD,aAAa,GAAG,IAAI,CAAC;SACxB;;YACG,MAAM,iBAAiB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAE9C,IAAI,WAAW,KAAK,OAAO;YACvB,MAAM;QAEV,MAAM,WAAW,GAAG,aAAa,IAAI,IAAI;YACrC,CAAC,CAAC,aAAa;YACf,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAE/B,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,CAAC;YACrD,aAAa,GAAG,IAAI,CAAC;SACxB;;YACG,MAAM,iBAAiB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAE9C,IAAI,WAAW,KAAK,OAAO;YACvB,MAAM;QAEV,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;QAE/C,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAEpE,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;QACjC,MAAM,UAAU,CAAC,wBAAwB,CAAC,WAAW,EAAE,WAAW,EAAE;YAChE,WAAW;YACX,IAAI;YACJ,IAAI;YACJ,IAAI;YACJ,aAAa,EAAE;gBACX,OAAO,EAAE,aAAa;gBACtB,gBAAgB,EAAE,sBAAsB,IAAI,IAAI,CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,SAAS;gBACrF,eAAe,EAAE,qBAAqB,IAAI,IAAI,CAAC,CAAC,CAAC,qBAAqB,CAAC,CAAC,CAAC,SAAS;gBAClF,eAAe,EAAE,wBAAwB;gBACzC,UAAU,EAAE,uBAAuB;aACtC;YACD,SAAS,EAAE,SAAS,KAAK,CAAC,CAAC;gBACvB,CAAC,CAAC,OAAO,CAAC,WAAW;gBACrB,CAAC,CAAC,SAAS,IAAI,CAAC;oBACZ,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,SAAS;YACnB,OAAO,CAAC,KAAK;gBACT,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;YAClD,CAAC;SACJ,CAAC,CAAC;QACH,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/B,OAAO,CAAC,GAAG,EAAE,CAAC;QAEd,IAAI,YAAY;YACZ,MAAM,OAAO,CAAC,YAAY,EAAE,CAAC;KACpC;AACL,CAAC"}
@@ -0,0 +1 @@
1
// U+FFFD REPLACEMENT CHARACTER — presumably used to mark text that could not
// be decoded into valid Unicode; confirm against the call sites in src.
export declare const UNKNOWN_UNICODE_CHAR = "\uFFFD";