modelfusion 0.92.1 → 0.93.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/README.md +19 -19
  2. package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts} +2 -2
  3. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  4. package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs} +11 -11
  5. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +20 -0
  6. package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js} +9 -9
  7. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -6
  8. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +4 -4
  9. package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -6
  10. package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts} +1 -1
  11. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +4 -4
  12. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +2 -2
  13. package/model-function/generate-structure/StructureFromTextStreamingModel.js +4 -4
  14. package/model-function/generate-structure/index.cjs +1 -1
  15. package/model-function/generate-structure/index.d.ts +1 -1
  16. package/model-function/generate-structure/index.js +1 -1
  17. package/model-function/generate-structure/jsonStructurePrompt.d.ts +2 -2
  18. package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs} +21 -21
  19. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +35 -0
  20. package/model-function/generate-text/{PromptFormatTextGenerationModel.js → PromptTemplateTextGenerationModel.js} +19 -19
  21. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +38 -0
  22. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +16 -0
  23. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +34 -0
  24. package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
  25. package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +11 -0
  26. package/model-function/generate-text/index.cjs +4 -4
  27. package/model-function/generate-text/index.d.ts +4 -4
  28. package/model-function/generate-text/index.js +4 -4
  29. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.cjs → prompt-template/AlpacaPromptTemplate.cjs} +5 -2
  30. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.d.ts → prompt-template/AlpacaPromptTemplate.d.ts} +5 -5
  31. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.js → prompt-template/AlpacaPromptTemplate.js} +5 -2
  32. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.cjs +31 -0
  33. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.js +29 -0
  34. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.cjs → prompt-template/ChatMLPromptTemplate.cjs} +5 -5
  35. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.d.ts → prompt-template/ChatMLPromptTemplate.d.ts} +7 -7
  36. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.js → prompt-template/ChatMLPromptTemplate.js} +5 -5
  37. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +49 -0
  38. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +47 -0
  39. package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.d.ts +1 -1
  40. package/model-function/generate-text/prompt-template/Content.js +1 -0
  41. package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.d.ts +7 -0
  42. package/model-function/generate-text/prompt-template/InstructionPrompt.js +1 -0
  43. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.cjs → prompt-template/Llama2PromptTemplate.cjs} +8 -7
  44. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.d.ts → prompt-template/Llama2PromptTemplate.d.ts} +7 -6
  45. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.js → prompt-template/Llama2PromptTemplate.js} +8 -7
  46. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +49 -0
  47. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.d.ts +1 -0
  48. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +47 -0
  49. package/model-function/generate-text/{prompt-format/TextPromptFormat.cjs → prompt-template/TextPromptTemplate.cjs} +3 -0
  50. package/model-function/generate-text/{prompt-format/TextPromptFormat.d.ts → prompt-template/TextPromptTemplate.d.ts} +4 -4
  51. package/model-function/generate-text/{prompt-format/TextPromptFormat.js → prompt-template/TextPromptTemplate.js} +3 -0
  52. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +49 -0
  53. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.d.ts +1 -0
  54. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +47 -0
  55. package/model-function/generate-text/{prompt-format/VicunaPromptFormat.d.ts → prompt-template/VicunaPromptTemplate.d.ts} +2 -2
  56. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +21 -0
  57. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.d.ts +1 -0
  58. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +19 -0
  59. package/model-function/generate-text/{prompt-format → prompt-template}/index.cjs +6 -6
  60. package/model-function/generate-text/prompt-template/index.d.ts +10 -0
  61. package/model-function/generate-text/prompt-template/index.js +10 -0
  62. package/model-function/index.cjs +2 -2
  63. package/model-function/index.d.ts +2 -2
  64. package/model-function/index.js +2 -2
  65. package/model-provider/anthropic/{AnthropicPromptFormat.cjs → AnthropicPromptTemplate.cjs} +15 -8
  66. package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +17 -0
  67. package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
  68. package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
  69. package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +1 -0
  70. package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
  71. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
  72. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
  73. package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
  74. package/model-provider/anthropic/index.cjs +2 -2
  75. package/model-provider/anthropic/index.d.ts +1 -1
  76. package/model-provider/anthropic/index.js +1 -1
  77. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
  78. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
  79. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
  80. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
  81. package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
  82. package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
  83. package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
  85. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
  86. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +4 -4
  87. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs} +1 -1
  88. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +11 -0
  89. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js} +1 -1
  90. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +12 -12
  91. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +7 -7
  92. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +12 -12
  93. package/model-provider/llamacpp/index.cjs +2 -2
  94. package/model-provider/llamacpp/index.d.ts +1 -1
  95. package/model-provider/llamacpp/index.js +1 -1
  96. package/model-provider/ollama/OllamaTextGenerationModel.cjs +9 -9
  97. package/model-provider/ollama/OllamaTextGenerationModel.d.ts +7 -7
  98. package/model-provider/ollama/OllamaTextGenerationModel.js +9 -9
  99. package/model-provider/openai/OpenAICompletionModel.cjs +10 -10
  100. package/model-provider/openai/OpenAICompletionModel.d.ts +7 -7
  101. package/model-provider/openai/OpenAICompletionModel.js +10 -10
  102. package/model-provider/openai/OpenAIImageGenerationModel.cjs +4 -4
  103. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
  104. package/model-provider/openai/OpenAIImageGenerationModel.js +4 -4
  105. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +16 -16
  106. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +14 -14
  107. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +16 -16
  108. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +1 -1
  109. package/model-provider/openai/chat/OpenAIChatModel.cjs +15 -15
  110. package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
  111. package/model-provider/openai/chat/OpenAIChatModel.js +15 -15
  112. package/model-provider/openai/chat/{OpenAIChatPromptFormat.cjs → OpenAIChatPromptTemplate.cjs} +1 -1
  113. package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +20 -0
  114. package/model-provider/openai/chat/{OpenAIChatPromptFormat.js → OpenAIChatPromptTemplate.js} +1 -1
  115. package/model-provider/openai/index.cjs +2 -2
  116. package/model-provider/openai/index.d.ts +1 -1
  117. package/model-provider/openai/index.js +1 -1
  118. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +14 -14
  119. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +11 -11
  120. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +14 -14
  121. package/model-provider/stability/StabilityImageGenerationModel.cjs +5 -5
  122. package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -4
  123. package/model-provider/stability/StabilityImageGenerationModel.js +5 -5
  124. package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +2 -2
  125. package/package.json +1 -1
  126. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +2 -2
  127. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +6 -6
  128. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +4 -4
  129. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +6 -6
  130. package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +0 -20
  131. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +0 -35
  132. package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +0 -38
  133. package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +0 -16
  134. package/model-function/generate-text/PromptFormatTextStreamingModel.js +0 -34
  135. package/model-function/generate-text/TextGenerationPromptFormat.d.ts +0 -11
  136. package/model-function/generate-text/prompt-format/index.d.ts +0 -10
  137. package/model-function/generate-text/prompt-format/index.js +0 -10
  138. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +0 -17
  139. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +0 -11
  140. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +0 -20
  141. /package/model-function/{PromptFormat.cjs → PromptTemplate.cjs} +0 -0
  142. /package/model-function/{PromptFormat.js → PromptTemplate.js} +0 -0
  143. /package/model-function/generate-structure/{StructureFromTextPromptFormat.cjs → StructureFromTextPromptTemplate.cjs} +0 -0
  144. /package/model-function/generate-structure/{StructureFromTextPromptFormat.js → StructureFromTextPromptTemplate.js} +0 -0
  145. /package/model-function/generate-text/{TextGenerationPromptFormat.cjs → TextGenerationPromptTemplate.cjs} +0 -0
  146. /package/model-function/generate-text/{TextGenerationPromptFormat.js → TextGenerationPromptTemplate.js} +0 -0
  147. /package/model-function/generate-text/{prompt-format/Content.js → prompt-template/AlpacaPromptTemplate.test.d.ts} +0 -0
  148. /package/model-function/generate-text/{prompt-format/InstructionPrompt.js → prompt-template/ChatMLPromptTemplate.test.d.ts} +0 -0
  149. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.cjs +0 -0
  150. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.js +0 -0
  151. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.cjs +0 -0
  152. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.d.ts +0 -0
  153. /package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.cjs +0 -0
  154. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.cjs +0 -0
  155. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.d.ts +0 -0
  156. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.js +0 -0
  157. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.cjs → prompt-template/VicunaPromptTemplate.cjs} +0 -0
  158. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.js → prompt-template/VicunaPromptTemplate.js} +0 -0
  159. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.cjs +0 -0
  160. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.d.ts +0 -0
  161. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.js +0 -0
@@ -4,9 +4,9 @@ exports.chat = exports.instruction = exports.text = void 0;
  const ChatPrompt_js_1 = require("./ChatPrompt.cjs");
  // see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
  const BEGIN_SEGMENT = "<s>";
- const END_SEGMENT = "</s>";
- const BEGIN_INSTRUCTION = "[INST]";
- const END_INSTRUCTION = "[/INST]";
+ const END_SEGMENT = " </s>";
+ const BEGIN_INSTRUCTION = "[INST] ";
+ const END_INSTRUCTION = " [/INST] ";
  const BEGIN_SYSTEM = "<<SYS>>\n";
  const END_SYSTEM = "\n<</SYS>>\n\n";
  /**
@@ -36,8 +36,9 @@ exports.text = text;
  * <s>[INST] <<SYS>>
  * ${ system prompt }
  * <</SYS>>
- *
- * { instruction } [/INST]
+ * ${ instruction }
+ * [/INST]
+ * ${ response prefix }
  * ```
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
@@ -47,8 +48,8 @@ function instruction() {
  stopSequences: [END_SEGMENT],
  format(prompt) {
  return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
- ? ` ${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
- : ""}${prompt.instruction}${END_INSTRUCTION}\n`;
+ ? `${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
+ : ""}${prompt.instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
  },
  };
  }
@@ -1,4 +1,4 @@
- import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
  import { TextChatPrompt } from "./ChatPrompt.js";
  import { TextInstructionPrompt } from "./InstructionPrompt.js";
  /**
@@ -11,7 +11,7 @@ import { TextInstructionPrompt } from "./InstructionPrompt.js";
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
  */
- export declare function text(): TextGenerationPromptFormat<string, string>;
+ export declare function text(): TextGenerationPromptTemplate<string, string>;
  /**
  * Formats an instruction prompt as a Llama 2 prompt.
  *
@@ -20,13 +20,14 @@ export declare function text(): TextGenerationPromptFormat<string, string>;
  * <s>[INST] <<SYS>>
  * ${ system prompt }
  * <</SYS>>
- *
- * { instruction } [/INST]
+ * ${ instruction }
+ * [/INST]
+ * ${ response prefix }
  * ```
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
  */
- export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
+ export declare function instruction(): TextGenerationPromptTemplate<TextInstructionPrompt, string>;
  /**
  * Formats a chat prompt as a Llama 2 prompt.
  *
@@ -39,4 +40,4 @@ export declare function instruction(): TextGenerationPromptFormat<TextInstructio
  * ${ user msg 1 } [/INST] ${ model response 1 } </s><s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } </s><s>[INST] ${ user msg 3 } [/INST]
  * ```
  */
- export declare function chat(): TextGenerationPromptFormat<TextChatPrompt, string>;
+ export declare function chat(): TextGenerationPromptTemplate<TextChatPrompt, string>;
@@ -1,9 +1,9 @@
  import { validateChatPrompt } from "./ChatPrompt.js";
  // see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
  const BEGIN_SEGMENT = "<s>";
- const END_SEGMENT = "</s>";
- const BEGIN_INSTRUCTION = "[INST]";
- const END_INSTRUCTION = "[/INST]";
+ const END_SEGMENT = " </s>";
+ const BEGIN_INSTRUCTION = "[INST] ";
+ const END_INSTRUCTION = " [/INST] ";
  const BEGIN_SYSTEM = "<<SYS>>\n";
  const END_SYSTEM = "\n<</SYS>>\n\n";
  /**
@@ -32,8 +32,9 @@ export function text() {
  * <s>[INST] <<SYS>>
  * ${ system prompt }
  * <</SYS>>
- *
- * { instruction } [/INST]
+ * ${ instruction }
+ * [/INST]
+ * ${ response prefix }
  * ```
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
@@ -43,8 +44,8 @@ export function instruction() {
  stopSequences: [END_SEGMENT],
  format(prompt) {
  return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
- ? ` ${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
- : ""}${prompt.instruction}${END_INSTRUCTION}\n`;
+ ? `${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
+ : ""}${prompt.instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
  },
  };
  }
@@ -0,0 +1,49 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const Llama2PromptTemplate_js_1 = require("./Llama2PromptTemplate.cjs");
+ describe("text prompt", () => {
+ it("should format prompt", () => {
+ const prompt = (0, Llama2PromptTemplate_js_1.text)().format("prompt");
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("instruction prompt", () => {
+ it("should format prompt with instruction", () => {
+ const prompt = (0, Llama2PromptTemplate_js_1.instruction)().format({
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with system and instruction", () => {
+ const prompt = (0, Llama2PromptTemplate_js_1.instruction)().format({
+ system: "system",
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with instruction and response prefix", () => {
+ const prompt = (0, Llama2PromptTemplate_js_1.instruction)().format({
+ instruction: "instruction",
+ responsePrefix: "response prefix",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = (0, Llama2PromptTemplate_js_1.chat)().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = (0, Llama2PromptTemplate_js_1.chat)().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
@@ -0,0 +1,47 @@
+ import { chat, instruction, text } from "./Llama2PromptTemplate.js";
+ describe("text prompt", () => {
+ it("should format prompt", () => {
+ const prompt = text().format("prompt");
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("instruction prompt", () => {
+ it("should format prompt with instruction", () => {
+ const prompt = instruction().format({
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with system and instruction", () => {
+ const prompt = instruction().format({
+ system: "system",
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with instruction and response prefix", () => {
+ const prompt = instruction().format({
+ instruction: "instruction",
+ responsePrefix: "response prefix",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = chat().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = chat().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
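The new Llama 2 tests only record snapshots, so the rendered strings are not visible in this diff. Based on the updated constants (BEGIN_INSTRUCTION = "[INST] ", END_INSTRUCTION = " [/INST] ") and the format() implementation shown above, an instruction prompt should now render roughly as in the following sketch. The import assumes the Llama2Prompt namespace re-export introduced by the prompt-template index changes further down reaches the package root; treat the exact whitespace as an assumption and the recorded snapshots as authoritative.

import { Llama2Prompt } from "modelfusion"; // assumed root re-export of the renamed namespace

// Sketch: expected rendering under the 0.93.1 spacing rules (not taken from the snapshots).
const formatted = Llama2Prompt.instruction().format({
  system: "system",
  instruction: "instruction",
  responsePrefix: "response prefix",
});
// Derived from the format() shown above:
// "<s>[INST] <<SYS>>\nsystem\n<</SYS>>\n\ninstruction [/INST] response prefix"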
@@ -21,6 +21,9 @@ const instruction = () => ({
  text += `${prompt.system}\n\n`;
  }
  text += prompt.instruction;
+ if (prompt.responsePrefix != null) {
+ text += `\n\n${prompt.responsePrefix}`;
+ }
  return text;
  },
  });
@@ -1,14 +1,14 @@
- import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
  import { TextChatPrompt } from "./ChatPrompt.js";
  import { TextInstructionPrompt } from "./InstructionPrompt.js";
  /**
  * Formats a text prompt as a basic text prompt. Does not change the text prompt in any way.
  */
- export declare const text: () => TextGenerationPromptFormat<string, string>;
+ export declare const text: () => TextGenerationPromptTemplate<string, string>;
  /**
  * Formats an instruction prompt as a basic text prompt.
  */
- export declare const instruction: () => TextGenerationPromptFormat<TextInstructionPrompt, string>;
+ export declare const instruction: () => TextGenerationPromptTemplate<TextInstructionPrompt, string>;
  /**
  * Formats a chat prompt as a basic text prompt.
  *
@@ -20,4 +20,4 @@ export declare const chat: (options?: {
  user?: string;
  assistant?: string;
  system?: string;
- }) => TextGenerationPromptFormat<TextChatPrompt, string>;
+ }) => TextGenerationPromptTemplate<TextChatPrompt, string>;
@@ -17,6 +17,9 @@ export const instruction = () => ({
  text += `${prompt.system}\n\n`;
  }
  text += prompt.instruction;
+ if (prompt.responsePrefix != null) {
+ text += `\n\n${prompt.responsePrefix}`;
+ }
  return text;
  },
  });
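The basic text instruction template now appends an optional response prefix after a blank line. A minimal usage sketch, assuming the TextPrompt namespace re-export shown further down is reachable from the package root:

import { TextPrompt } from "modelfusion"; // assumed root re-export

const formatted = TextPrompt.instruction().format({
  system: "You are a concise assistant.",
  instruction: "Summarize the text.",
  responsePrefix: "Summary:",
});
// Per the branches shown above:
// "You are a concise assistant.\n\nSummarize the text.\n\nSummary:"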
@@ -0,0 +1,49 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const TextPromptTemplate_js_1 = require("./TextPromptTemplate.cjs");
+ describe("text prompt", () => {
+ it("should format prompt", () => {
+ const prompt = (0, TextPromptTemplate_js_1.text)().format("prompt");
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("instruction prompt", () => {
+ it("should format prompt with instruction", () => {
+ const prompt = (0, TextPromptTemplate_js_1.instruction)().format({
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with system and instruction", () => {
+ const prompt = (0, TextPromptTemplate_js_1.instruction)().format({
+ system: "system",
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with instruction and response prefix", () => {
+ const prompt = (0, TextPromptTemplate_js_1.instruction)().format({
+ instruction: "instruction",
+ responsePrefix: "response prefix",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = (0, TextPromptTemplate_js_1.chat)().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = (0, TextPromptTemplate_js_1.chat)().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
@@ -0,0 +1,47 @@
+ import { chat, instruction, text } from "./TextPromptTemplate.js";
+ describe("text prompt", () => {
+ it("should format prompt", () => {
+ const prompt = text().format("prompt");
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("instruction prompt", () => {
+ it("should format prompt with instruction", () => {
+ const prompt = instruction().format({
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with system and instruction", () => {
+ const prompt = instruction().format({
+ system: "system",
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with instruction and response prefix", () => {
+ const prompt = instruction().format({
+ instruction: "instruction",
+ responsePrefix: "response prefix",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = chat().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = chat().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
@@ -1,4 +1,4 @@
- import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
  import { TextChatPrompt } from "./ChatPrompt.js";
  /**
  * Formats a chat prompt as a Vicuna prompt.
@@ -13,4 +13,4 @@ import { TextChatPrompt } from "./ChatPrompt.js";
  * ASSISTANT:
  * ```
  */
- export declare function chat(): TextGenerationPromptFormat<TextChatPrompt, string>;
+ export declare function chat(): TextGenerationPromptTemplate<TextChatPrompt, string>;
@@ -0,0 +1,21 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const VicunaPromptTemplate_js_1 = require("./VicunaPromptTemplate.cjs");
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = (0, VicunaPromptTemplate_js_1.chat)().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = (0, VicunaPromptTemplate_js_1.chat)().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
@@ -0,0 +1,19 @@
+ import { chat } from "./VicunaPromptTemplate.js";
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = chat().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = chat().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
@@ -26,14 +26,14 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.VicunaPromptFormat = exports.TextPromptFormat = exports.Llama2PromptFormat = exports.ChatMLPromptFormat = exports.AlpacaPromptFormat = void 0;
- exports.AlpacaPromptFormat = __importStar(require("./AlpacaPromptFormat.cjs"));
- exports.ChatMLPromptFormat = __importStar(require("./ChatMLPromptFormat.cjs"));
+ exports.VicunaPrompt = exports.TextPrompt = exports.Llama2Prompt = exports.ChatMLPrompt = exports.AlpacaPrompt = void 0;
+ exports.AlpacaPrompt = __importStar(require("./AlpacaPromptTemplate.cjs"));
+ exports.ChatMLPrompt = __importStar(require("./ChatMLPromptTemplate.cjs"));
  __exportStar(require("./ChatPrompt.cjs"), exports);
  __exportStar(require("./Content.cjs"), exports);
  __exportStar(require("./InstructionPrompt.cjs"), exports);
- exports.Llama2PromptFormat = __importStar(require("./Llama2PromptFormat.cjs"));
+ exports.Llama2Prompt = __importStar(require("./Llama2PromptTemplate.cjs"));
  __exportStar(require("./InvalidPromptError.cjs"), exports);
- exports.TextPromptFormat = __importStar(require("./TextPromptFormat.cjs"));
- exports.VicunaPromptFormat = __importStar(require("./VicunaPromptFormat.cjs"));
+ exports.TextPrompt = __importStar(require("./TextPromptTemplate.cjs"));
+ exports.VicunaPrompt = __importStar(require("./VicunaPromptTemplate.cjs"));
  __exportStar(require("./trimChatPrompt.cjs"), exports);
@@ -0,0 +1,10 @@
+ export * as AlpacaPrompt from "./AlpacaPromptTemplate.js";
+ export * as ChatMLPrompt from "./ChatMLPromptTemplate.js";
+ export * from "./ChatPrompt.js";
+ export * from "./Content.js";
+ export * from "./InstructionPrompt.js";
+ export * as Llama2Prompt from "./Llama2PromptTemplate.js";
+ export * from "./InvalidPromptError.js";
+ export * as TextPrompt from "./TextPromptTemplate.js";
+ export * as VicunaPrompt from "./VicunaPromptTemplate.js";
+ export * from "./trimChatPrompt.js";
@@ -0,0 +1,10 @@
+ export * as AlpacaPrompt from "./AlpacaPromptTemplate.js";
+ export * as ChatMLPrompt from "./ChatMLPromptTemplate.js";
+ export * from "./ChatPrompt.js";
+ export * from "./Content.js";
+ export * from "./InstructionPrompt.js";
+ export * as Llama2Prompt from "./Llama2PromptTemplate.js";
+ export * from "./InvalidPromptError.js";
+ export * as TextPrompt from "./TextPromptTemplate.js";
+ export * as VicunaPrompt from "./VicunaPromptTemplate.js";
+ export * from "./trimChatPrompt.js";
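As the re-exports above show, the prompt namespaces drop the PromptFormat suffix: AlpacaPrompt, ChatMLPrompt, Llama2Prompt, TextPrompt, and VicunaPrompt replace the former AlpacaPromptFormat, ChatMLPromptFormat, Llama2PromptFormat, TextPromptFormat, and VicunaPromptFormat. A hedged migration sketch, assuming these namespaces continue to be re-exported from the package root:

// 0.92.x
// import { Llama2PromptFormat, TextPromptFormat } from "modelfusion";
// const template = Llama2PromptFormat.chat();

// 0.93.x: same functions, renamed namespaces (assumed root re-export)
import { Llama2Prompt, TextPrompt } from "modelfusion";
const chatTemplate = Llama2Prompt.chat();
const plainTemplate = TextPrompt.instruction();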
@@ -19,14 +19,14 @@ __exportStar(require("./Model.cjs"), exports);
  __exportStar(require("./ModelCallEvent.cjs"), exports);
  __exportStar(require("./ModelCallMetadata.cjs"), exports);
  __exportStar(require("./ModelInformation.cjs"), exports);
- __exportStar(require("./PromptFormat.cjs"), exports);
+ __exportStar(require("./PromptTemplate.cjs"), exports);
  __exportStar(require("./SuccessfulModelCall.cjs"), exports);
  __exportStar(require("./embed/EmbeddingEvent.cjs"), exports);
  __exportStar(require("./embed/EmbeddingModel.cjs"), exports);
  __exportStar(require("./embed/embed.cjs"), exports);
  __exportStar(require("./generate-image/ImageGenerationEvent.cjs"), exports);
  __exportStar(require("./generate-image/ImageGenerationModel.cjs"), exports);
- __exportStar(require("./generate-image/PromptFormatImageGenerationModel.cjs"), exports);
+ __exportStar(require("./generate-image/PromptTemplateImageGenerationModel.cjs"), exports);
  __exportStar(require("./generate-image/generateImage.cjs"), exports);
  __exportStar(require("./generate-speech/index.cjs"), exports);
  __exportStar(require("./generate-structure/index.cjs"), exports);
@@ -3,14 +3,14 @@ export * from "./Model.js";
  export * from "./ModelCallEvent.js";
  export * from "./ModelCallMetadata.js";
  export * from "./ModelInformation.js";
- export * from "./PromptFormat.js";
+ export * from "./PromptTemplate.js";
  export * from "./SuccessfulModelCall.js";
  export * from "./embed/EmbeddingEvent.js";
  export * from "./embed/EmbeddingModel.js";
  export * from "./embed/embed.js";
  export * from "./generate-image/ImageGenerationEvent.js";
  export * from "./generate-image/ImageGenerationModel.js";
- export * from "./generate-image/PromptFormatImageGenerationModel.js";
+ export * from "./generate-image/PromptTemplateImageGenerationModel.js";
  export * from "./generate-image/generateImage.js";
  export * from "./generate-speech/index.js";
  export * from "./generate-structure/index.js";
@@ -3,14 +3,14 @@ export * from "./Model.js";
  export * from "./ModelCallEvent.js";
  export * from "./ModelCallMetadata.js";
  export * from "./ModelInformation.js";
- export * from "./PromptFormat.js";
+ export * from "./PromptTemplate.js";
  export * from "./SuccessfulModelCall.js";
  export * from "./embed/EmbeddingEvent.js";
  export * from "./embed/EmbeddingModel.js";
  export * from "./embed/embed.js";
  export * from "./generate-image/ImageGenerationEvent.js";
  export * from "./generate-image/ImageGenerationModel.js";
- export * from "./generate-image/PromptFormatImageGenerationModel.js";
+ export * from "./generate-image/PromptTemplateImageGenerationModel.js";
  export * from "./generate-image/generateImage.js";
  export * from "./generate-speech/index.js";
  export * from "./generate-structure/index.js";
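Across the model-function index files, every PromptFormat export becomes a PromptTemplate export (PromptTemplate, TextGenerationPromptTemplate, PromptTemplateImageGenerationModel, and so on). For consumers that define custom prompt formats, the change is a rename; a minimal sketch of a custom template under the new type name, with the object shape inferred from the templates earlier in this diff and the root export of the type assumed:

import type { TextGenerationPromptTemplate } from "modelfusion"; // renamed from TextGenerationPromptFormat

// Hypothetical custom template: upper-cases the prompt text before generation.
const shoutingTemplate: TextGenerationPromptTemplate<string, string> = {
  format: (prompt) => prompt.toUpperCase(),
  stopSequences: [],
};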
@@ -1,7 +1,9 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.chat = exports.instruction = exports.text = void 0;
- const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-format/ChatPrompt.cjs");
+ const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-template/ChatPrompt.cjs");
+ const HUMAN_PREFIX = "\n\nHuman:";
+ const ASSISTANT_PREFIX = "\n\nAssistant:";
  /**
  * Formats a text prompt as an Anthropic prompt.
  */
@@ -9,9 +11,9 @@ function text() {
  return {
  format(prompt) {
  let text = "";
- text += "\n\nHuman:";
+ text += HUMAN_PREFIX;
  text += prompt;
- text += "\n\nAssistant:";
+ text += ASSISTANT_PREFIX;
  return text;
  },
  stopSequences: [],
@@ -25,9 +27,12 @@ function instruction() {
  return {
  format(prompt) {
  let text = prompt.system ?? "";
- text += "\n\nHuman:";
+ text += HUMAN_PREFIX;
  text += prompt.instruction;
- text += "\n\nAssistant:";
+ text += ASSISTANT_PREFIX;
+ if (prompt.responsePrefix != null) {
+ text += prompt.responsePrefix;
+ }
  return text;
  },
  stopSequences: [],
@@ -47,11 +52,13 @@ function chat() {
  for (const { role, content } of prompt.messages) {
  switch (role) {
  case "user": {
- text += `\n\nHuman:${content}`;
+ text += HUMAN_PREFIX;
+ text += content;
  break;
  }
  case "assistant": {
- text += `\n\nAssistant:${content}`;
+ text += ASSISTANT_PREFIX;
+ text += content;
  break;
  }
  default: {
@@ -61,7 +68,7 @@
  }
  }
  // AI message prefix:
- text += `\n\nAssistant:`;
+ text += ASSISTANT_PREFIX;
  return text;
  },
  stopSequences: [],
@@ -0,0 +1,17 @@
+ import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+ import { TextChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+ import { TextInstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+ /**
+ * Formats a text prompt as an Anthropic prompt.
+ */
+ export declare function text(): TextGenerationPromptTemplate<string, string>;
+ /**
+ * Formats an instruction prompt as an Anthropic prompt.
+ */
+ export declare function instruction(): TextGenerationPromptTemplate<TextInstructionPrompt, string>;
+ /**
+ * Formats a chat prompt as an Anthropic prompt.
+ *
+ * @see https://docs.anthropic.com/claude/docs/constructing-a-prompt
+ */
+ export declare function chat(): TextGenerationPromptTemplate<TextChatPrompt, string>;
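The Anthropic template now builds on shared HUMAN_PREFIX and ASSISTANT_PREFIX constants and adds response-prefix support for instruction prompts. A sketch of the resulting string, derived from the format() implementation above; the import points at the new file itself because the provider's index re-export name is not visible in this diff:

import { instruction } from "./AnthropicPromptTemplate.js"; // path of the new file shown above

const formatted = instruction().format({
  system: "You are a poet.",
  instruction: "Write a haiku about the sea.",
  responsePrefix: " Here is a haiku:",
});
// Per the prefixes above:
// "You are a poet.\n\nHuman:Write a haiku about the sea.\n\nAssistant: Here is a haiku:"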