node-llama-cpp 2.8.4 → 3.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185)
  1. package/README.md +2 -2
  2. package/dist/ChatWrapper.d.ts +49 -0
  3. package/dist/ChatWrapper.js +120 -0
  4. package/dist/ChatWrapper.js.map +1 -0
  5. package/dist/chatWrappers/AlpacaChatWrapper.d.ts +12 -0
  6. package/dist/chatWrappers/AlpacaChatWrapper.js +21 -0
  7. package/dist/chatWrappers/AlpacaChatWrapper.js.map +1 -0
  8. package/dist/chatWrappers/ChatMLChatWrapper.d.ts +13 -0
  9. package/dist/chatWrappers/ChatMLChatWrapper.js +83 -0
  10. package/dist/chatWrappers/ChatMLChatWrapper.js.map +1 -0
  11. package/dist/chatWrappers/EmptyChatWrapper.d.ts +4 -0
  12. package/dist/chatWrappers/EmptyChatWrapper.js +5 -0
  13. package/dist/chatWrappers/EmptyChatWrapper.js.map +1 -0
  14. package/dist/chatWrappers/FalconChatWrapper.d.ts +21 -0
  15. package/dist/chatWrappers/FalconChatWrapper.js +104 -0
  16. package/dist/chatWrappers/FalconChatWrapper.js.map +1 -0
  17. package/dist/chatWrappers/FunctionaryChatWrapper.d.ts +41 -0
  18. package/dist/chatWrappers/FunctionaryChatWrapper.js +200 -0
  19. package/dist/chatWrappers/FunctionaryChatWrapper.js.map +1 -0
  20. package/dist/chatWrappers/GeneralChatWrapper.d.ts +21 -0
  21. package/dist/chatWrappers/GeneralChatWrapper.js +112 -0
  22. package/dist/chatWrappers/GeneralChatWrapper.js.map +1 -0
  23. package/dist/chatWrappers/LlamaChatWrapper.d.ts +13 -0
  24. package/dist/chatWrappers/LlamaChatWrapper.js +78 -0
  25. package/dist/chatWrappers/LlamaChatWrapper.js.map +1 -0
  26. package/dist/chatWrappers/resolveChatWrapperBasedOnModel.d.ts +13 -0
  27. package/dist/chatWrappers/resolveChatWrapperBasedOnModel.js +55 -0
  28. package/dist/chatWrappers/resolveChatWrapperBasedOnModel.js.map +1 -0
  29. package/dist/cli/cli.js +1 -1
  30. package/dist/cli/cli.js.map +1 -1
  31. package/dist/cli/commands/ChatCommand.d.ts +2 -1
  32. package/dist/cli/commands/ChatCommand.js +90 -42
  33. package/dist/cli/commands/ChatCommand.js.map +1 -1
  34. package/dist/config.js +1 -1
  35. package/dist/config.js.map +1 -1
  36. package/dist/index.d.ts +20 -12
  37. package/dist/index.js +19 -11
  38. package/dist/index.js.map +1 -1
  39. package/dist/llamaEvaluator/LlamaBins.d.ts +18 -4
  40. package/dist/llamaEvaluator/LlamaBins.js +3 -3
  41. package/dist/llamaEvaluator/LlamaChat/LlamaChat.d.ts +175 -0
  42. package/dist/llamaEvaluator/LlamaChat/LlamaChat.js +704 -0
  43. package/dist/llamaEvaluator/LlamaChat/LlamaChat.js.map +1 -0
  44. package/dist/llamaEvaluator/LlamaChat/utils/FunctionCallGrammar.d.ts +21 -0
  45. package/dist/llamaEvaluator/LlamaChat/utils/FunctionCallGrammar.js +120 -0
  46. package/dist/llamaEvaluator/LlamaChat/utils/FunctionCallGrammar.js.map +1 -0
  47. package/dist/llamaEvaluator/LlamaChat/utils/contextShiftStrategies/eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy.d.ts +16 -0
  48. package/dist/llamaEvaluator/LlamaChat/utils/contextShiftStrategies/eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy.js +117 -0
  49. package/dist/llamaEvaluator/LlamaChat/utils/contextShiftStrategies/eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy.js.map +1 -0
  50. package/dist/llamaEvaluator/LlamaChatSession/LlamaChatSession.d.ts +146 -0
  51. package/dist/llamaEvaluator/LlamaChatSession/LlamaChatSession.js +211 -0
  52. package/dist/llamaEvaluator/LlamaChatSession/LlamaChatSession.js.map +1 -0
  53. package/dist/llamaEvaluator/LlamaChatSession/utils/defineChatSessionFunction.d.ts +7 -0
  54. package/dist/llamaEvaluator/LlamaChatSession/utils/defineChatSessionFunction.js +8 -0
  55. package/dist/llamaEvaluator/LlamaChatSession/utils/defineChatSessionFunction.js.map +1 -0
  56. package/dist/llamaEvaluator/LlamaContext/LlamaContext.d.ts +107 -0
  57. package/dist/llamaEvaluator/LlamaContext/LlamaContext.js +597 -0
  58. package/dist/llamaEvaluator/LlamaContext/LlamaContext.js.map +1 -0
  59. package/dist/llamaEvaluator/LlamaContext/types.d.ts +86 -0
  60. package/dist/llamaEvaluator/LlamaContext/types.js +2 -0
  61. package/dist/llamaEvaluator/LlamaContext/types.js.map +1 -0
  62. package/dist/llamaEvaluator/LlamaContext/utils/batchItemsPrioritizingStrategies/firstInFirstOutStrategy.d.ts +5 -0
  63. package/dist/llamaEvaluator/LlamaContext/utils/batchItemsPrioritizingStrategies/firstInFirstOutStrategy.js +16 -0
  64. package/dist/llamaEvaluator/LlamaContext/utils/batchItemsPrioritizingStrategies/firstInFirstOutStrategy.js.map +1 -0
  65. package/dist/llamaEvaluator/LlamaContext/utils/batchItemsPrioritizingStrategies/maximumParallelismStrategy.d.ts +5 -0
  66. package/dist/llamaEvaluator/LlamaContext/utils/batchItemsPrioritizingStrategies/maximumParallelismStrategy.js +42 -0
  67. package/dist/llamaEvaluator/LlamaContext/utils/batchItemsPrioritizingStrategies/maximumParallelismStrategy.js.map +1 -0
  68. package/dist/llamaEvaluator/LlamaContext/utils/resolveBatchItemsPrioritizingStrategy.d.ts +2 -0
  69. package/dist/llamaEvaluator/LlamaContext/utils/resolveBatchItemsPrioritizingStrategy.js +13 -0
  70. package/dist/llamaEvaluator/LlamaContext/utils/resolveBatchItemsPrioritizingStrategy.js.map +1 -0
  71. package/dist/llamaEvaluator/LlamaGrammar.d.ts +9 -13
  72. package/dist/llamaEvaluator/LlamaGrammar.js +10 -15
  73. package/dist/llamaEvaluator/LlamaGrammar.js.map +1 -1
  74. package/dist/llamaEvaluator/LlamaGrammarEvaluationState.d.ts +6 -5
  75. package/dist/llamaEvaluator/LlamaGrammarEvaluationState.js +8 -7
  76. package/dist/llamaEvaluator/LlamaGrammarEvaluationState.js.map +1 -1
  77. package/dist/llamaEvaluator/LlamaJsonSchemaGrammar.js +2 -1
  78. package/dist/llamaEvaluator/LlamaJsonSchemaGrammar.js.map +1 -1
  79. package/dist/llamaEvaluator/LlamaModel.d.ts +101 -105
  80. package/dist/llamaEvaluator/LlamaModel.js +305 -57
  81. package/dist/llamaEvaluator/LlamaModel.js.map +1 -1
  82. package/dist/types.d.ts +44 -4
  83. package/dist/types.js +5 -1
  84. package/dist/types.js.map +1 -1
  85. package/dist/utils/LlamaText.d.ts +42 -0
  86. package/dist/utils/LlamaText.js +207 -0
  87. package/dist/utils/LlamaText.js.map +1 -0
  88. package/dist/utils/ReplHistory.js +1 -1
  89. package/dist/utils/ReplHistory.js.map +1 -1
  90. package/dist/utils/StopGenerationDetector.d.ts +28 -0
  91. package/dist/utils/StopGenerationDetector.js +205 -0
  92. package/dist/utils/StopGenerationDetector.js.map +1 -0
  93. package/dist/utils/TokenStreamRegulator.d.ts +30 -0
  94. package/dist/utils/TokenStreamRegulator.js +96 -0
  95. package/dist/utils/TokenStreamRegulator.js.map +1 -0
  96. package/dist/utils/appendUserMessageToChatHistory.d.ts +2 -0
  97. package/dist/utils/appendUserMessageToChatHistory.js +18 -0
  98. package/dist/utils/appendUserMessageToChatHistory.js.map +1 -0
  99. package/dist/utils/compareTokens.d.ts +2 -0
  100. package/dist/utils/compareTokens.js +4 -0
  101. package/dist/utils/compareTokens.js.map +1 -0
  102. package/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.d.ts +18 -0
  103. package/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.js +61 -0
  104. package/dist/utils/findCharacterRemovalCountToFitChatHistoryInContext.js.map +1 -0
  105. package/dist/utils/gbnfJson/GbnfGrammarGenerator.d.ts +1 -0
  106. package/dist/utils/gbnfJson/GbnfGrammarGenerator.js +17 -0
  107. package/dist/utils/gbnfJson/GbnfGrammarGenerator.js.map +1 -1
  108. package/dist/utils/gbnfJson/GbnfTerminal.d.ts +1 -1
  109. package/dist/utils/gbnfJson/GbnfTerminal.js.map +1 -1
  110. package/dist/utils/gbnfJson/terminals/GbnfVerbatimText.d.ts +6 -0
  111. package/dist/utils/gbnfJson/terminals/GbnfVerbatimText.js +21 -0
  112. package/dist/utils/gbnfJson/terminals/GbnfVerbatimText.js.map +1 -0
  113. package/dist/utils/gbnfJson/types.d.ts +1 -1
  114. package/dist/utils/gbnfJson/types.js.map +1 -1
  115. package/dist/utils/gbnfJson/utils/validateObjectAgainstGbnfSchema.d.ts +1 -0
  116. package/dist/utils/gbnfJson/utils/validateObjectAgainstGbnfSchema.js.map +1 -1
  117. package/dist/utils/getBin.d.ts +71 -38
  118. package/dist/utils/getBin.js.map +1 -1
  119. package/dist/utils/getGbnfGrammarForGbnfJsonSchema.js +1 -15
  120. package/dist/utils/getGbnfGrammarForGbnfJsonSchema.js.map +1 -1
  121. package/dist/utils/getReleaseInfo.d.ts +1 -1
  122. package/dist/utils/getReleaseInfo.js.map +1 -1
  123. package/dist/utils/getTypeScriptTypeStringForGbnfJsonSchema.d.ts +2 -0
  124. package/dist/utils/getTypeScriptTypeStringForGbnfJsonSchema.js +49 -0
  125. package/dist/utils/getTypeScriptTypeStringForGbnfJsonSchema.js.map +1 -0
  126. package/dist/utils/parseModelFileName.d.ts +9 -0
  127. package/dist/utils/parseModelFileName.js +68 -0
  128. package/dist/utils/parseModelFileName.js.map +1 -0
  129. package/dist/utils/parseModelTypeDescription.d.ts +6 -0
  130. package/dist/utils/parseModelTypeDescription.js +9 -0
  131. package/dist/utils/parseModelTypeDescription.js.map +1 -0
  132. package/dist/utils/resolveChatWrapper.d.ts +4 -0
  133. package/dist/utils/resolveChatWrapper.js +16 -0
  134. package/dist/utils/resolveChatWrapper.js.map +1 -0
  135. package/dist/utils/truncateTextAndRoundToWords.d.ts +8 -0
  136. package/dist/utils/truncateTextAndRoundToWords.js +27 -0
  137. package/dist/utils/truncateTextAndRoundToWords.js.map +1 -0
  138. package/llama/.clang-format +10 -9
  139. package/llama/addon.cpp +701 -352
  140. package/llama/gitRelease.bundle +0 -0
  141. package/llamaBins/linux-arm64/llama-addon.node +0 -0
  142. package/llamaBins/linux-armv7l/llama-addon.node +0 -0
  143. package/llamaBins/linux-x64/llama-addon.node +0 -0
  144. package/llamaBins/mac-arm64/llama-addon.node +0 -0
  145. package/llamaBins/mac-x64/llama-addon.node +0 -0
  146. package/llamaBins/win-x64/llama-addon.node +0 -0
  147. package/package.json +24 -14
  148. package/dist/ChatPromptWrapper.d.ts +0 -11
  149. package/dist/ChatPromptWrapper.js +0 -20
  150. package/dist/ChatPromptWrapper.js.map +0 -1
  151. package/dist/chatWrappers/ChatMLChatPromptWrapper.d.ts +0 -12
  152. package/dist/chatWrappers/ChatMLChatPromptWrapper.js +0 -22
  153. package/dist/chatWrappers/ChatMLChatPromptWrapper.js.map +0 -1
  154. package/dist/chatWrappers/EmptyChatPromptWrapper.d.ts +0 -4
  155. package/dist/chatWrappers/EmptyChatPromptWrapper.js +0 -5
  156. package/dist/chatWrappers/EmptyChatPromptWrapper.js.map +0 -1
  157. package/dist/chatWrappers/FalconChatPromptWrapper.d.ts +0 -19
  158. package/dist/chatWrappers/FalconChatPromptWrapper.js +0 -33
  159. package/dist/chatWrappers/FalconChatPromptWrapper.js.map +0 -1
  160. package/dist/chatWrappers/GeneralChatPromptWrapper.d.ts +0 -19
  161. package/dist/chatWrappers/GeneralChatPromptWrapper.js +0 -38
  162. package/dist/chatWrappers/GeneralChatPromptWrapper.js.map +0 -1
  163. package/dist/chatWrappers/LlamaChatPromptWrapper.d.ts +0 -12
  164. package/dist/chatWrappers/LlamaChatPromptWrapper.js +0 -23
  165. package/dist/chatWrappers/LlamaChatPromptWrapper.js.map +0 -1
  166. package/dist/chatWrappers/createChatWrapperByBos.d.ts +0 -2
  167. package/dist/chatWrappers/createChatWrapperByBos.js +0 -14
  168. package/dist/chatWrappers/createChatWrapperByBos.js.map +0 -1
  169. package/dist/chatWrappers/generateContextTextFromConversationHistory.d.ts +0 -23
  170. package/dist/chatWrappers/generateContextTextFromConversationHistory.js +0 -47
  171. package/dist/chatWrappers/generateContextTextFromConversationHistory.js.map +0 -1
  172. package/dist/llamaEvaluator/LlamaChatSession.d.ts +0 -122
  173. package/dist/llamaEvaluator/LlamaChatSession.js +0 -236
  174. package/dist/llamaEvaluator/LlamaChatSession.js.map +0 -1
  175. package/dist/llamaEvaluator/LlamaContext.d.ts +0 -98
  176. package/dist/llamaEvaluator/LlamaContext.js +0 -140
  177. package/dist/llamaEvaluator/LlamaContext.js.map +0 -1
  178. package/dist/utils/getTextCompletion.d.ts +0 -3
  179. package/dist/utils/getTextCompletion.js +0 -12
  180. package/dist/utils/getTextCompletion.js.map +0 -1
  181. package/dist/utils/withLock.d.ts +0 -1
  182. package/dist/utils/withLock.js +0 -19
  183. package/dist/utils/withLock.js.map +0 -1
  184. package/llamaBins/mac-arm64/ggml-metal.metal +0 -5820
  185. package/llamaBins/mac-x64/ggml-metal.metal +0 -5820
Binary file
Binary file
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "node-llama-cpp",
3
- "version": "2.8.4",
3
+ "version": "3.0.0-beta.2",
4
4
  "description": "Run AI models locally on your machine with node.js bindings for llama.cpp. Force a JSON schema on the model output on the generation level",
5
5
  "main": "dist/index.js",
6
6
  "type": "module",
@@ -47,16 +47,19 @@
47
47
  "prewatch": "rimraf ./dist ./tsconfig.tsbuildinfo",
48
48
  "watch": "tsc --build tsconfig.json --watch --force",
49
49
  "cmake-js-llama": "cd llama && cmake-js",
50
- "test": "npm run test:typescript && npm run lint:eslint && npm run test:standalone",
51
- "test:standalone": "vitest run ./test",
52
- "test:standalone:interactive": "vitest watch ./test",
50
+ "test": "npm run test:typescript && npm run lint:eslint && npm run test:standalone && npm run test:modelDependent",
51
+ "test:standalone": "vitest run ./test/standalone",
52
+ "test:standalone:interactive": "vitest watch ./test/standalone",
53
+ "test:modelDependent": "vitest run ./test/modelDependent",
54
+ "test:modelDependent:interactive": "vitest watch ./test/modelDependent",
53
55
  "test:typescript": "tsc --build tsconfig.json --dry --force",
54
56
  "lint": "npm run lint:eslint",
55
57
  "lint:eslint": "eslint --ext .js --ext .ts .",
56
58
  "format": "npm run lint:eslint -- --fix",
57
- "dev:setup": "npm run build && node ./dist/cli/cli.js download && npm run docs:generateTypedoc",
59
+ "dev:setup:downloadAllTestModels": "node --loader ts-node/esm test/utils/scripts/downloadAllTestModels.ts",
60
+ "dev:setup": "npm run build && node ./dist/cli/cli.js download && npm run docs:generateTypedoc && npm run dev:setup:downloadAllTestModels",
58
61
  "dev:build": "npm run build && node ./dist/cli/cli.js build",
59
- "clean": "rm -rf ./node_modules ./dist ./tsconfig.tsbuildinfo",
62
+ "clean": "rm -rf ./node_modules ./dist ./tsconfig.tsbuildinfo ./test/.models",
60
63
  "docs:generateTypedoc": "typedoc && rimraf ./docs/api/index.md ./docs/api/exports.md",
61
64
  "docs:dev": "npm run docs:generateTypedoc && vitepress dev",
62
65
  "docs:build": "npm run docs:generateTypedoc && vitepress build",
@@ -97,6 +100,10 @@
97
100
  "bugs": {
98
101
  "url": "https://github.com/withcatai/node-llama-cpp/issues"
99
102
  },
103
+ "funding": {
104
+ "type": "github",
105
+ "url": "https://github.com/sponsors/giladgd"
106
+ },
100
107
  "homepage": "https://withcatai.github.io/node-llama-cpp/",
101
108
  "devDependencies": {
102
109
  "@commitlint/cli": "^17.7.1",
@@ -114,18 +121,20 @@
114
121
  "@vitest/coverage-v8": "^0.34.6",
115
122
  "eslint": "^8.46.0",
116
123
  "eslint-plugin-import": "^2.28.0",
117
- "eslint-plugin-node": "github:giladgd/eslint-plugin-node#dev/giladgd/fixImportExtentionFixingInTypeScript",
124
+ "eslint-plugin-jsdoc": "^46.9.0",
125
+ "eslint-plugin-n": "^16.3.1",
118
126
  "husky": "^8.0.3",
127
+ "ipull": "^1.2.1",
119
128
  "rimraf": "^5.0.1",
120
- "semantic-release": "^21.0.7",
129
+ "semantic-release": "^22.0.8",
121
130
  "ts-node": "^10.9.1",
122
131
  "tslib": "^2.6.1",
123
- "typedoc": "^0.25.1",
124
- "typedoc-plugin-markdown": "^4.0.0-next.22",
125
- "typedoc-plugin-mdn-links": "^3.1.0",
126
- "typedoc-vitepress-theme": "^1.0.0-next.3",
127
- "typescript": "^5.1.6",
128
- "vitepress": "^1.0.0-rc.20",
132
+ "typedoc": "^0.25.3",
133
+ "typedoc-plugin-markdown": "4.0.0-next.41",
134
+ "typedoc-plugin-mdn-links": "^3.1.5",
135
+ "typedoc-vitepress-theme": "^1.0.0-next.7",
136
+ "typescript": "^5.2.2",
137
+ "vitepress": "1.0.0-rc.22",
129
138
  "vitest": "^0.34.6",
130
139
  "zx": "^7.2.3"
131
140
  },
@@ -138,6 +147,7 @@
138
147
  "cross-spawn": "^7.0.3",
139
148
  "env-var": "^7.3.1",
140
149
  "fs-extra": "^11.1.1",
150
+ "lifecycle-utils": "^1.1.3",
141
151
  "log-symbols": "^5.1.0",
142
152
  "node-addon-api": "^7.0.0",
143
153
  "octokit": "^3.1.0",
@@ -1,11 +0,0 @@
1
- export declare abstract class ChatPromptWrapper {
2
- abstract readonly wrapperName: string;
3
- wrapPrompt(prompt: string, { systemPrompt, promptIndex }: {
4
- systemPrompt: string;
5
- promptIndex: number;
6
- lastStopString: string | null;
7
- lastStopStringSuffix: string | null;
8
- }): string;
9
- getStopStrings(): string[];
10
- getDefaultStopString(): string;
11
- }
@@ -1,20 +0,0 @@
1
- export class ChatPromptWrapper {
2
- wrapPrompt(prompt, { systemPrompt, promptIndex }) {
3
- if (promptIndex === 0) {
4
- return systemPrompt + "\n" + prompt;
5
- }
6
- else {
7
- return prompt;
8
- }
9
- }
10
- getStopStrings() {
11
- return [];
12
- }
13
- getDefaultStopString() {
14
- const stopString = this.getStopStrings()[0];
15
- if (stopString == null || stopString.length === 0)
16
- throw new Error(`Prompt wrapper "${this.wrapperName}" has no stop strings`);
17
- return stopString;
18
- }
19
- }
20
- //# sourceMappingURL=ChatPromptWrapper.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"ChatPromptWrapper.js","sourceRoot":"","sources":["../src/ChatPromptWrapper.ts"],"names":[],"mappings":"AAAA,MAAM,OAAgB,iBAAiB;IAG5B,UAAU,CAAC,MAAc,EAAE,EAAC,YAAY,EAAE,WAAW,EAE3D;QACG,IAAI,WAAW,KAAK,CAAC,EAAE;YACnB,OAAO,YAAY,GAAG,IAAI,GAAG,MAAM,CAAC;SACvC;aAAM;YACH,OAAO,MAAM,CAAC;SACjB;IACL,CAAC;IAEM,cAAc;QACjB,OAAO,EAAE,CAAC;IACd,CAAC;IAEM,oBAAoB;QACvB,MAAM,UAAU,GAAG,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,CAAC,CAAC;QAE5C,IAAI,UAAU,IAAI,IAAI,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC;YAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,IAAI,CAAC,WAAW,uBAAuB,CAAC,CAAC;QAEhF,OAAO,UAAU,CAAC;IACtB,CAAC;CACJ"}
@@ -1,12 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- export declare class ChatMLChatPromptWrapper extends ChatPromptWrapper {
3
- readonly wrapperName: string;
4
- wrapPrompt(prompt: string, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }: {
5
- systemPrompt: string;
6
- promptIndex: number;
7
- lastStopString: string | null;
8
- lastStopStringSuffix: string | null;
9
- }): string;
10
- getStopStrings(): string[];
11
- getDefaultStopString(): string;
12
- }
@@ -1,22 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- import { getTextCompletion } from "../utils/getTextCompletion.js";
3
- // source: https://github.com/openai/openai-python/blob/120d225b91a8453e15240a49fb1c6794d8119326/chatml.md
4
- export class ChatMLChatPromptWrapper extends ChatPromptWrapper {
5
- wrapperName = "ChatML";
6
- wrapPrompt(prompt, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }) {
7
- const previousCompletionEnd = (lastStopString ?? "") + (lastStopStringSuffix ?? "");
8
- if (promptIndex === 0 && systemPrompt != "")
9
- return (getTextCompletion(previousCompletionEnd, "<|im_start|>system\n") ?? "<|im_start|>system\n") +
10
- systemPrompt + "<|im_end|>\n<|im_start|>user\n" + prompt + "<|im_end|>\n<|im_start|>assistant\n";
11
- else
12
- return (getTextCompletion(previousCompletionEnd, "<|im_end|>\n<|im_start|>user\n") ?? "<|im_end|>\n<|im_start|>user\n") +
13
- prompt + "<|im_end|>\n<|im_start|>assistant\n";
14
- }
15
- getStopStrings() {
16
- return ["<|im_end|>"];
17
- }
18
- getDefaultStopString() {
19
- return "<|im_end|>";
20
- }
21
- }
22
- //# sourceMappingURL=ChatMLChatPromptWrapper.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"ChatMLChatPromptWrapper.js","sourceRoot":"","sources":["../../src/chatWrappers/ChatMLChatPromptWrapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAC,iBAAiB,EAAC,MAAM,+BAA+B,CAAC;AAEhE,0GAA0G;AAC1G,MAAM,OAAO,uBAAwB,SAAQ,iBAAiB;IAC1C,WAAW,GAAW,QAAQ,CAAC;IAE/B,UAAU,CAAC,MAAc,EAAE,EAAC,YAAY,EAAE,WAAW,EAAE,cAAc,EAAE,oBAAoB,EAE1G;QACG,MAAM,qBAAqB,GAAG,CAAC,cAAc,IAAI,EAAE,CAAC,GAAG,CAAC,oBAAoB,IAAI,EAAE,CAAC,CAAC;QAEpF,IAAI,WAAW,KAAK,CAAC,IAAI,YAAY,IAAI,EAAE;YACvC,OAAO,CAAC,iBAAiB,CAAC,qBAAqB,EAAE,sBAAsB,CAAC,IAAI,sBAAsB,CAAC;gBAC/F,YAAY,GAAG,gCAAgC,GAAG,MAAM,GAAG,qCAAqC,CAAC;;YAErG,OAAO,CAAC,iBAAiB,CAAC,qBAAqB,EAAE,gCAAgC,CAAC,IAAI,gCAAgC,CAAC;gBACnH,MAAM,GAAG,qCAAqC,CAAC;IAC3D,CAAC;IAEe,cAAc;QAC1B,OAAO,CAAC,YAAY,CAAC,CAAC;IAC1B,CAAC;IAEe,oBAAoB;QAChC,OAAO,YAAY,CAAC;IACxB,CAAC;CACJ"}
@@ -1,4 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- export declare class EmptyChatPromptWrapper extends ChatPromptWrapper {
3
- readonly wrapperName: string;
4
- }
@@ -1,5 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- export class EmptyChatPromptWrapper extends ChatPromptWrapper {
3
- wrapperName = "Empty";
4
- }
5
- //# sourceMappingURL=EmptyChatPromptWrapper.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"EmptyChatPromptWrapper.js","sourceRoot":"","sources":["../../src/chatWrappers/EmptyChatPromptWrapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAE1D,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IACzC,WAAW,GAAW,OAAO,CAAC;CACjD"}
@@ -1,19 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- export declare class FalconChatPromptWrapper extends ChatPromptWrapper {
3
- readonly wrapperName: string;
4
- private readonly _instructionName;
5
- private readonly _responseName;
6
- constructor({ instructionName, responseName }?: {
7
- instructionName?: string;
8
- responseName?: string;
9
- });
10
- wrapPrompt(prompt: string, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }: {
11
- systemPrompt: string;
12
- promptIndex: number;
13
- lastStopString: string | null;
14
- lastStopStringSuffix: string | null;
15
- }): string;
16
- getStopStrings(): string[];
17
- getDefaultStopString(): string;
18
- private _getPromptPrefix;
19
- }
@@ -1,33 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- import { getTextCompletion } from "../utils/getTextCompletion.js";
3
- export class FalconChatPromptWrapper extends ChatPromptWrapper {
4
- wrapperName = "Falcon";
5
- _instructionName;
6
- _responseName;
7
- constructor({ instructionName = "User", responseName = "Assistant" } = {}) {
8
- super();
9
- this._instructionName = instructionName;
10
- this._responseName = responseName;
11
- }
12
- wrapPrompt(prompt, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }) {
13
- if (promptIndex === 0)
14
- return systemPrompt + `\n${this._instructionName}: ` + prompt + `\n${this._responseName}: `;
15
- return this._getPromptPrefix(lastStopString, lastStopStringSuffix) + prompt + `\n${this._responseName}: `;
16
- }
17
- getStopStrings() {
18
- return [
19
- `\n${this._instructionName}: `,
20
- `\n${this._responseName}:`
21
- ];
22
- }
23
- getDefaultStopString() {
24
- return `\n${this._instructionName}: `;
25
- }
26
- _getPromptPrefix(lastStopString, lastStopStringSuffix) {
27
- return getTextCompletion((lastStopString ?? "") + (lastStopStringSuffix ?? ""), [
28
- `\n${this._instructionName}: `,
29
- `${this._instructionName}: `
30
- ]) ?? `\n${this._instructionName}: `;
31
- }
32
- }
33
- //# sourceMappingURL=FalconChatPromptWrapper.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"FalconChatPromptWrapper.js","sourceRoot":"","sources":["../../src/chatWrappers/FalconChatPromptWrapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAC,iBAAiB,EAAC,MAAM,+BAA+B,CAAC;AAEhE,MAAM,OAAO,uBAAwB,SAAQ,iBAAiB;IAC1C,WAAW,GAAW,QAAQ,CAAC;IAC9B,gBAAgB,CAAS;IACzB,aAAa,CAAS;IAEvC,YAAmB,EAAC,eAAe,GAAG,MAAM,EAAE,YAAY,GAAG,WAAW,KAAuD,EAAE;QAC7H,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC;QACxC,IAAI,CAAC,aAAa,GAAG,YAAY,CAAC;IACtC,CAAC;IAEe,UAAU,CAAC,MAAc,EAAE,EAAC,YAAY,EAAE,WAAW,EAAE,cAAc,EAAE,oBAAoB,EAE1G;QACG,IAAI,WAAW,KAAK,CAAC;YACjB,OAAO,YAAY,GAAG,KAAK,IAAI,CAAC,gBAAgB,IAAI,GAAG,MAAM,GAAG,KAAK,IAAI,CAAC,aAAa,IAAI,CAAC;QAEhG,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,EAAE,oBAAoB,CAAC,GAAG,MAAM,GAAG,KAAK,IAAI,CAAC,aAAa,IAAI,CAAC;IAC9G,CAAC;IAEe,cAAc;QAC1B,OAAO;YACH,KAAK,IAAI,CAAC,gBAAgB,IAAI;YAC9B,KAAK,IAAI,CAAC,aAAa,GAAG;SAC7B,CAAC;IACN,CAAC;IAEe,oBAAoB;QAChC,OAAO,KAAK,IAAI,CAAC,gBAAgB,IAAI,CAAC;IAC1C,CAAC;IAEO,gBAAgB,CAAC,cAA6B,EAAE,oBAAmC;QACvF,OAAO,iBAAiB,CAAC,CAAC,cAAc,IAAI,EAAE,CAAC,GAAG,CAAC,oBAAoB,IAAI,EAAE,CAAC,EAAE;YAC5E,KAAK,IAAI,CAAC,gBAAgB,IAAI;YAC9B,GAAG,IAAI,CAAC,gBAAgB,IAAI;SAC/B,CAAC,IAAI,KAAK,IAAI,CAAC,gBAAgB,IAAI,CAAC;IACzC,CAAC;CACJ"}
@@ -1,19 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- export declare class GeneralChatPromptWrapper extends ChatPromptWrapper {
3
- readonly wrapperName: string;
4
- private readonly _instructionName;
5
- private readonly _responseName;
6
- constructor({ instructionName, responseName }?: {
7
- instructionName?: string;
8
- responseName?: string;
9
- });
10
- wrapPrompt(prompt: string, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }: {
11
- systemPrompt: string;
12
- promptIndex: number;
13
- lastStopString: string | null;
14
- lastStopStringSuffix: string | null;
15
- }): string;
16
- getStopStrings(): string[];
17
- getDefaultStopString(): string;
18
- private _getPromptPrefix;
19
- }
@@ -1,38 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- import { getTextCompletion } from "../utils/getTextCompletion.js";
3
- export class GeneralChatPromptWrapper extends ChatPromptWrapper {
4
- wrapperName = "General";
5
- _instructionName;
6
- _responseName;
7
- constructor({ instructionName = "Human", responseName = "Assistant" } = {}) {
8
- super();
9
- this._instructionName = instructionName;
10
- this._responseName = responseName;
11
- }
12
- wrapPrompt(prompt, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }) {
13
- if (promptIndex === 0)
14
- return systemPrompt + `\n\n### ${this._instructionName}:\n` + prompt + `\n\n### ${this._responseName}:\n`;
15
- return this._getPromptPrefix(lastStopString, lastStopStringSuffix) + prompt + `\n\n### ${this._responseName}:\n`;
16
- }
17
- getStopStrings() {
18
- return [
19
- `\n\n### ${this._instructionName}`,
20
- `### ${this._instructionName}`,
21
- `\n\n### ${this._responseName}`,
22
- `### ${this._responseName}`,
23
- "<end>"
24
- ];
25
- }
26
- getDefaultStopString() {
27
- return `\n\n### ${this._instructionName}`;
28
- }
29
- _getPromptPrefix(lastStopString, lastStopStringSuffix) {
30
- return getTextCompletion(lastStopString === "<end>"
31
- ? lastStopStringSuffix
32
- : ((lastStopString ?? "") + (lastStopStringSuffix ?? "")), [
33
- `\n\n### ${this._instructionName}:\n`,
34
- `### ${this._instructionName}:\n`
35
- ]) ?? `\n\n### ${this._instructionName}:\n`;
36
- }
37
- }
38
- //# sourceMappingURL=GeneralChatPromptWrapper.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"GeneralChatPromptWrapper.js","sourceRoot":"","sources":["../../src/chatWrappers/GeneralChatPromptWrapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAC,iBAAiB,EAAC,MAAM,+BAA+B,CAAC;AAEhE,MAAM,OAAO,wBAAyB,SAAQ,iBAAiB;IAC3C,WAAW,GAAW,SAAS,CAAC;IAC/B,gBAAgB,CAAS;IACzB,aAAa,CAAS;IAEvC,YAAmB,EAAC,eAAe,GAAG,OAAO,EAAE,YAAY,GAAG,WAAW,KAAuD,EAAE;QAC9H,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC;QACxC,IAAI,CAAC,aAAa,GAAG,YAAY,CAAC;IACtC,CAAC;IAEe,UAAU,CAAC,MAAc,EAAE,EAAC,YAAY,EAAE,WAAW,EAAE,cAAc,EAAE,oBAAoB,EAE1G;QACG,IAAI,WAAW,KAAK,CAAC;YACjB,OAAO,YAAY,GAAG,WAAW,IAAI,CAAC,gBAAgB,KAAK,GAAG,MAAM,GAAG,WAAW,IAAI,CAAC,aAAa,KAAK,CAAC;QAE9G,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,EAAE,oBAAoB,CAAC,GAAG,MAAM,GAAG,WAAW,IAAI,CAAC,aAAa,KAAK,CAAC;IACrH,CAAC;IAEe,cAAc;QAC1B,OAAO;YACH,WAAW,IAAI,CAAC,gBAAgB,EAAE;YAClC,OAAO,IAAI,CAAC,gBAAgB,EAAE;YAC9B,WAAW,IAAI,CAAC,aAAa,EAAE;YAC/B,OAAO,IAAI,CAAC,aAAa,EAAE;YAC3B,OAAO;SACV,CAAC;IACN,CAAC;IAEe,oBAAoB;QAChC,OAAO,WAAW,IAAI,CAAC,gBAAgB,EAAE,CAAC;IAC9C,CAAC;IAEO,gBAAgB,CAAC,cAA6B,EAAE,oBAAmC;QACvF,OAAO,iBAAiB,CACpB,cAAc,KAAK,OAAO;YACtB,CAAC,CAAC,oBAAoB;YACtB,CAAC,CAAC,CAAC,CAAC,cAAc,IAAI,EAAE,CAAC,GAAG,CAAC,oBAAoB,IAAI,EAAE,CAAC,CAAC,EAC7D;YACI,WAAW,IAAI,CAAC,gBAAgB,KAAK;YACrC,OAAO,IAAI,CAAC,gBAAgB,KAAK;SACpC,CACJ,IAAI,WAAW,IAAI,CAAC,gBAAgB,KAAK,CAAC;IAC/C,CAAC;CACJ"}
@@ -1,12 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- export declare class LlamaChatPromptWrapper extends ChatPromptWrapper {
3
- readonly wrapperName: string;
4
- wrapPrompt(prompt: string, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }: {
5
- systemPrompt: string;
6
- promptIndex: number;
7
- lastStopString: string | null;
8
- lastStopStringSuffix: string | null;
9
- }): string;
10
- getStopStrings(): string[];
11
- getDefaultStopString(): string;
12
- }
@@ -1,23 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- import { getTextCompletion } from "../utils/getTextCompletion.js";
3
- // source: https://huggingface.co/blog/llama2#how-to-prompt-llama-2
4
- export class LlamaChatPromptWrapper extends ChatPromptWrapper {
5
- wrapperName = "LlamaChat";
6
- wrapPrompt(prompt, { systemPrompt, promptIndex, lastStopString, lastStopStringSuffix }) {
7
- const previousCompletionEnd = (lastStopString ?? "") + (lastStopStringSuffix ?? "");
8
- if (promptIndex === 0 && systemPrompt != "") {
9
- return (getTextCompletion(previousCompletionEnd, "<s>[INST] <<SYS>>\n") ?? "<s>[INST] <<SYS>>\n") + systemPrompt +
10
- "\n<</SYS>>\n\n" + prompt + " [/INST]\n\n";
11
- }
12
- else {
13
- return (getTextCompletion(previousCompletionEnd, "</s><s>[INST] ") ?? "<s>[INST] ") + prompt + " [/INST]\n\n";
14
- }
15
- }
16
- getStopStrings() {
17
- return ["</s>"];
18
- }
19
- getDefaultStopString() {
20
- return "</s>";
21
- }
22
- }
23
- //# sourceMappingURL=LlamaChatPromptWrapper.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"LlamaChatPromptWrapper.js","sourceRoot":"","sources":["../../src/chatWrappers/LlamaChatPromptWrapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAC,iBAAiB,EAAC,MAAM,+BAA+B,CAAC;AAEhE,mEAAmE;AACnE,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IACzC,WAAW,GAAW,WAAW,CAAC;IAElC,UAAU,CAAC,MAAc,EAAE,EAAC,YAAY,EAAE,WAAW,EAAE,cAAc,EAAE,oBAAoB,EAE1G;QACG,MAAM,qBAAqB,GAAG,CAAC,cAAc,IAAI,EAAE,CAAC,GAAG,CAAC,oBAAoB,IAAI,EAAE,CAAC,CAAC;QAEpF,IAAI,WAAW,KAAK,CAAC,IAAI,YAAY,IAAI,EAAE,EAAE;YACzC,OAAO,CAAC,iBAAiB,CAAC,qBAAqB,EAAE,qBAAqB,CAAC,IAAI,qBAAqB,CAAC,GAAG,YAAY;gBAC5G,gBAAgB,GAAG,MAAM,GAAG,cAAc,CAAC;SAClD;aAAM;YACH,OAAO,CAAC,iBAAiB,CAAC,qBAAqB,EAAE,gBAAgB,CAAC,IAAI,YAAY,CAAC,GAAG,MAAM,GAAG,cAAc,CAAC;SACjH;IACL,CAAC;IAEe,cAAc;QAC1B,OAAO,CAAC,MAAM,CAAC,CAAC;IACpB,CAAC;IAEe,oBAAoB;QAChC,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ"}
@@ -1,2 +0,0 @@
1
- import { LlamaChatPromptWrapper } from "./LlamaChatPromptWrapper.js";
2
- export declare function getChatWrapperByBos(bos: string | undefined | null): typeof LlamaChatPromptWrapper | null;
@@ -1,14 +0,0 @@
1
- import { LlamaChatPromptWrapper } from "./LlamaChatPromptWrapper.js";
2
- import { ChatMLChatPromptWrapper } from "./ChatMLChatPromptWrapper.js";
3
- export function getChatWrapperByBos(bos) {
4
- if (bos === "" || bos == null)
5
- return null;
6
- if ("<s>[INST] <<SYS>>\n".startsWith(bos)) {
7
- return LlamaChatPromptWrapper;
8
- }
9
- else if ("<|im_start|>system\n".startsWith(bos)) {
10
- return ChatMLChatPromptWrapper;
11
- }
12
- return null;
13
- }
14
- //# sourceMappingURL=createChatWrapperByBos.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"createChatWrapperByBos.js","sourceRoot":"","sources":["../../src/chatWrappers/createChatWrapperByBos.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,sBAAsB,EAAC,MAAM,6BAA6B,CAAC;AACnE,OAAO,EAAC,uBAAuB,EAAC,MAAM,8BAA8B,CAAC;AAErE,MAAM,UAAU,mBAAmB,CAAC,GAA8B;IAC9D,IAAI,GAAG,KAAK,EAAE,IAAI,GAAG,IAAI,IAAI;QACzB,OAAO,IAAI,CAAC;IAEhB,IAAI,qBAAqB,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QACvC,OAAO,sBAAsB,CAAC;KACjC;SAAM,IAAI,sBAAsB,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC/C,OAAO,uBAAuB,CAAC;KAClC;IAED,OAAO,IAAI,CAAC;AAChB,CAAC"}
@@ -1,23 +0,0 @@
1
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
2
- import { ConversationInteraction } from "../types.js";
3
- /**
4
- * Generate context text to load into a model context from a conversation history.
5
- * @param {ChatPromptWrapper} chatPromptWrapper
6
- * @param {ConversationInteraction[]} conversationHistory
7
- * @param {object} [options]
8
- * @param {string} [options.systemPrompt]
9
- * @param {number} [options.currentPromptIndex]
10
- * @param {string | null} [options.lastStopString]
11
- * @param {string | null} [options.lastStopStringSuffix]
12
- * @returns {{text: string, stopString: (string | null), stopStringSuffix: (string | null)}}
13
- */
14
- export declare function generateContextTextFromConversationHistory(chatPromptWrapper: ChatPromptWrapper, conversationHistory: readonly ConversationInteraction[], { systemPrompt, currentPromptIndex, lastStopString, lastStopStringSuffix }?: {
15
- systemPrompt?: string;
16
- currentPromptIndex?: number;
17
- lastStopString?: string | null;
18
- lastStopStringSuffix?: string | null;
19
- }): {
20
- text: string;
21
- stopString: string | null;
22
- stopStringSuffix: string | null;
23
- };
@@ -1,47 +0,0 @@
1
- import { defaultChatSystemPrompt } from "../config.js";
2
- /**
3
- * Generate context text to load into a model context from a conversation history.
4
- * @param {ChatPromptWrapper} chatPromptWrapper
5
- * @param {ConversationInteraction[]} conversationHistory
6
- * @param {object} [options]
7
- * @param {string} [options.systemPrompt]
8
- * @param {number} [options.currentPromptIndex]
9
- * @param {string | null} [options.lastStopString]
10
- * @param {string | null} [options.lastStopStringSuffix]
11
- * @returns {{text: string, stopString: (string | null), stopStringSuffix: (string | null)}}
12
- */
13
- export function generateContextTextFromConversationHistory(chatPromptWrapper, conversationHistory, { systemPrompt = defaultChatSystemPrompt, currentPromptIndex = 0, lastStopString = null, lastStopStringSuffix = null } = {}) {
14
- let res = "";
15
- for (let i = 0; i < conversationHistory.length; i++) {
16
- const interaction = conversationHistory[i];
17
- const wrappedPrompt = chatPromptWrapper.wrapPrompt(interaction.prompt, {
18
- systemPrompt,
19
- promptIndex: currentPromptIndex,
20
- lastStopString,
21
- lastStopStringSuffix
22
- });
23
- const stopStrings = chatPromptWrapper.getStopStrings();
24
- const defaultStopString = chatPromptWrapper.getDefaultStopString();
25
- const stopStringsToCheckInResponse = new Set([...stopStrings, defaultStopString]);
26
- currentPromptIndex++;
27
- lastStopString = null;
28
- lastStopStringSuffix = null;
29
- res += wrappedPrompt;
30
- for (const stopString of stopStringsToCheckInResponse) {
31
- if (interaction.response.includes(stopString)) {
32
- console.error(`Stop string "${stopString}" was found in model response of conversation interaction index ${i}`, { interaction, stopString });
33
- throw new Error("A stop string cannot be in a conversation history interaction model response");
34
- }
35
- }
36
- res += interaction.response;
37
- res += defaultStopString;
38
- lastStopString = defaultStopString;
39
- lastStopStringSuffix = "";
40
- }
41
- return {
42
- text: res,
43
- stopString: lastStopString,
44
- stopStringSuffix: lastStopStringSuffix
45
- };
46
- }
47
- //# sourceMappingURL=generateContextTextFromConversationHistory.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"generateContextTextFromConversationHistory.js","sourceRoot":"","sources":["../../src/chatWrappers/generateContextTextFromConversationHistory.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,uBAAuB,EAAC,MAAM,cAAc,CAAC;AAIrD;;;;;;;;;;GAUG;AACH,MAAM,UAAU,0CAA0C,CACtD,iBAAoC,EACpC,mBAAuD,EACvD,EACI,YAAY,GAAG,uBAAuB,EAAE,kBAAkB,GAAG,CAAC,EAAE,cAAc,GAAG,IAAI,EAAE,oBAAoB,GAAG,IAAI,KAGlH,EAAE;IAMN,IAAI,GAAG,GAAG,EAAE,CAAC;IAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,mBAAmB,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACjD,MAAM,WAAW,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAC;QAC3C,MAAM,aAAa,GAAG,iBAAiB,CAAC,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE;YACnE,YAAY;YACZ,WAAW,EAAE,kBAAkB;YAC/B,cAAc;YACd,oBAAoB;SACvB,CAAC,CAAC;QACH,MAAM,WAAW,GAAG,iBAAiB,CAAC,cAAc,EAAE,CAAC;QACvD,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,oBAAoB,EAAE,CAAC;QACnE,MAAM,4BAA4B,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,WAAW,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAElF,kBAAkB,EAAE,CAAC;QACrB,cAAc,GAAG,IAAI,CAAC;QACtB,oBAAoB,GAAG,IAAI,CAAC;QAE5B,GAAG,IAAI,aAAa,CAAC;QAErB,KAAK,MAAM,UAAU,IAAI,4BAA4B,EAAE;YACnD,IAAI,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;gBAC3C,OAAO,CAAC,KAAK,CACT,gBAAgB,UAAU,mEAAmE,CAAC,EAAE,EAChG,EAAC,WAAW,EAAE,UAAU,EAAC,CAC5B,CAAC;gBACF,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC,CAAC;aACnG;SACJ;QAED,GAAG,IAAI,WAAW,CAAC,QAAQ,CAAC;QAC5B,GAAG,IAAI,iBAAiB,CAAC;QACzB,cAAc,GAAG,iBAAiB,CAAC;QACnC,oBAAoB,GAAG,EAAE,CAAC;KAC7B;IAED,OAAO;QACH,IAAI,EAAE,GAAG;QACT,UAAU,EAAE,cAAc;QAC1B,gBAAgB,EAAE,oBAAoB;KACzC,CAAC;AACN,CAAC"}
@@ -1,122 +0,0 @@
1
- /// <reference types="node" />
2
- import { ChatPromptWrapper } from "../ChatPromptWrapper.js";
3
- import { ConversationInteraction, Token } from "../types.js";
4
- import { LlamaContext } from "./LlamaContext.js";
5
- import { LlamaGrammar } from "./LlamaGrammar.js";
6
- export type LlamaChatSessionOptions = {
7
- context: LlamaContext;
8
- printLLamaSystemInfo?: boolean;
9
- /** GeneralChatPromptWrapper is ued by default */
10
- promptWrapper?: ChatPromptWrapper | "auto";
11
- systemPrompt?: string;
12
- /** Conversation history to load into the context to continue an existing conversation */
13
- conversationHistory?: readonly ConversationInteraction[];
14
- };
15
- export type LLamaChatPromptOptions = {
16
- onToken?: (tokens: Token[]) => void;
17
- signal?: AbortSignal;
18
- maxTokens?: number;
19
- /**
20
- * Temperature is a hyperparameter that controls the randomness of the generated text.
21
- * It affects the probability distribution of the model's output tokens.
22
- * A higher temperature (e.g., 1.5) makes the output more random and creative,
23
- * while a lower temperature (e.g., 0.5) makes the output more focused, deterministic, and conservative.
24
- * The suggested temperature is 0.8, which provides a balance between randomness and determinism.
25
- * At the extreme, a temperature of 0 will always pick the most likely next token, leading to identical outputs in each run.
26
- *
27
- * Set to `0` to disable.
28
- * Disabled by default (set to `0`).
29
- */
30
- temperature?: number;
31
- /**
32
- * Limits the model to consider only the K most likely next tokens for sampling at each step of sequence generation.
33
- * An integer number between `1` and the size of the vocabulary.
34
- * Set to `0` to disable (which uses the full vocabulary).
35
- *
36
- * Only relevant when `temperature` is set to a value greater than 0.
37
- */
38
- topK?: number;
39
- /**
40
- * Dynamically selects the smallest set of tokens whose cumulative probability exceeds the threshold P,
41
- * and samples the next token only from this set.
42
- * A float number between `0` and `1`.
43
- * Set to `1` to disable.
44
- *
45
- * Only relevant when `temperature` is set to a value greater than `0`.
46
- */
47
- topP?: number;
48
- grammar?: LlamaGrammar;
49
- /**
50
- * Trim whitespace from the end of the generated text
51
- * Disabled by default.
52
- */
53
- trimWhitespaceSuffix?: boolean;
54
- repeatPenalty?: false | LlamaChatSessionRepeatPenalty;
55
- };
56
- export type LlamaChatSessionRepeatPenalty = {
57
- /**
58
- * Number of recent tokens generated by the model to apply penalties to repetition of.
59
- * Defaults to `64`.
60
- */
61
- lastTokens?: number;
62
- punishTokensFilter?: (tokens: Token[]) => Token[];
63
- /**
64
- * Penalize new line tokens.
65
- * Enabled by default.
66
- */
67
- penalizeNewLine?: boolean;
68
- /**
69
- * The relative amount to lower the probability of the tokens in `punishTokens` by
70
- * Defaults to `1.1`.
71
- * Set to `1` to disable.
72
- */
73
- penalty?: number;
74
- /**
75
- * For n time a token is in the `punishTokens` array, lower its probability by `n * frequencyPenalty`
76
- * Disabled by default (`0`).
77
- * Set to a value between `0` and `1` to enable.
78
- */
79
- frequencyPenalty?: number;
80
- /**
81
- * Lower the probability of all the tokens in the `punishTokens` array by `presencePenalty`
82
- * Disabled by default (`0`).
83
- * Set to a value between `0` and `1` to enable.
84
- */
85
- presencePenalty?: number;
86
- };
87
- export declare class LlamaChatSession {
88
- private readonly _systemPrompt;
89
- private readonly _printLLamaSystemInfo;
90
- private readonly _promptWrapper;
91
- private _promptIndex;
92
- private _initialized;
93
- private _lastStopString;
94
- private _lastStopStringSuffix;
95
- private _conversationHistoryToLoad;
96
- private readonly _ctx;
97
- /**
98
- * @param {LlamaChatSessionOptions} options
99
- */
100
- constructor({ context, printLLamaSystemInfo, promptWrapper, systemPrompt, conversationHistory }: LlamaChatSessionOptions);
101
- get initialized(): boolean;
102
- get context(): LlamaContext;
103
- init(): Promise<void>;
104
- /**
105
- * @param {string} prompt
106
- * @param {object} options
107
- * @returns {Promise<string>}
108
- */
109
- prompt(prompt: string, { onToken, signal, maxTokens, temperature, topK, topP, grammar, trimWhitespaceSuffix, repeatPenalty }?: LLamaChatPromptOptions): Promise<string>;
110
- /**
111
- * @param {string} prompt
112
- * @param {LLamaChatPromptOptions} options
113
- */
114
- promptWithMeta(prompt: string, { onToken, signal, maxTokens, temperature, topK, topP, grammar, trimWhitespaceSuffix, repeatPenalty }?: LLamaChatPromptOptions): Promise<{
115
- text: string;
116
- stopReason: "maxTokens" | "eosToken" | "stopString";
117
- stopString: string | null;
118
- stopStringSuffix: string | null;
119
- }>;
120
- private _evalTokens;
121
- private _checkStopString;
122
- }