modelfusion 0.105.0 → 0.107.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (199)
  1. package/CHANGELOG.md +26 -0
  2. package/README.md +16 -59
  3. package/core/DefaultRun.cjs +0 -4
  4. package/core/DefaultRun.d.ts +0 -2
  5. package/core/DefaultRun.js +0 -4
  6. package/core/ExtensionFunctionEvent.d.ts +11 -0
  7. package/core/FunctionEvent.d.ts +2 -2
  8. package/extension/index.cjs +22 -3
  9. package/extension/index.d.ts +5 -1
  10. package/extension/index.js +4 -1
  11. package/index.cjs +0 -3
  12. package/index.d.ts +0 -3
  13. package/index.js +0 -3
  14. package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
  15. package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
  16. package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
  17. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
  18. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
  19. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +11 -0
  20. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +11 -0
  21. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +10 -8
  22. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
  23. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +10 -8
  24. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +11 -0
  25. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +11 -0
  26. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs +150 -0
  27. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.d.ts +62 -0
  28. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js +143 -0
  29. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.cjs +60 -0
  30. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.js +58 -0
  31. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.cjs +11 -0
  32. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.js +11 -0
  33. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +11 -0
  34. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +11 -0
  35. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +11 -0
  36. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +11 -0
  37. package/model-function/generate-text/prompt-template/index.cjs +2 -1
  38. package/model-function/generate-text/prompt-template/index.d.ts +1 -0
  39. package/model-function/generate-text/prompt-template/index.js +1 -0
  40. package/model-function/index.cjs +0 -1
  41. package/model-function/index.d.ts +0 -1
  42. package/model-function/index.js +0 -1
  43. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
  44. package/model-provider/cohere/CohereTextGenerationModel.d.ts +6 -6
  45. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +3 -3
  46. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.cjs → LlamaCppCompletionModel.cjs} +8 -8
  47. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.d.ts → LlamaCppCompletionModel.d.ts} +49 -49
  48. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.js → LlamaCppCompletionModel.js} +6 -6
  49. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.cjs → LlamaCppCompletionModel.test.cjs} +3 -3
  50. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.js → LlamaCppCompletionModel.test.js} +3 -3
  51. package/model-provider/llamacpp/LlamaCppFacade.cjs +2 -2
  52. package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -2
  53. package/model-provider/llamacpp/LlamaCppFacade.js +2 -2
  54. package/model-provider/llamacpp/index.cjs +1 -1
  55. package/model-provider/llamacpp/index.d.ts +1 -1
  56. package/model-provider/llamacpp/index.js +1 -1
  57. package/model-provider/mistral/MistralChatModel.cjs +4 -4
  58. package/model-provider/mistral/MistralChatModel.d.ts +6 -6
  59. package/model-provider/mistral/MistralChatModel.js +1 -1
  60. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
  61. package/model-provider/mistral/index.cjs +3 -3
  62. package/model-provider/mistral/index.d.ts +2 -2
  63. package/model-provider/mistral/index.js +2 -2
  64. package/model-provider/ollama/OllamaChatModel.d.ts +9 -8
  65. package/model-provider/ollama/OllamaChatModel.js +1 -1
  66. package/model-provider/ollama/OllamaCompletionModel.d.ts +2 -1
  67. package/model-provider/ollama/OllamaCompletionModel.js +1 -1
  68. package/model-provider/ollama/OllamaCompletionModel.test.cjs +1 -7
  69. package/model-provider/ollama/OllamaCompletionModel.test.js +1 -7
  70. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +8 -8
  71. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
  72. package/model-provider/openai/OpenAICompletionModel.d.ts +6 -6
  73. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
  74. package/model-provider/openai/OpenAITranscriptionModel.d.ts +11 -11
  75. package/model-provider/openai/index.cjs +0 -1
  76. package/model-provider/openai/index.d.ts +0 -1
  77. package/model-provider/openai/index.js +0 -1
  78. package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
  79. package/package.json +9 -20
  80. package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
  81. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
  82. package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
  83. package/tool/generate-tool-call/index.cjs +1 -0
  84. package/tool/generate-tool-call/index.d.ts +1 -0
  85. package/tool/generate-tool-call/index.js +1 -0
  86. package/tool/generate-tool-call/jsonToolCallPrompt.cjs +30 -0
  87. package/tool/generate-tool-call/jsonToolCallPrompt.d.ts +5 -0
  88. package/tool/generate-tool-call/jsonToolCallPrompt.js +27 -0
  89. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
  90. package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
  91. package/tool/generate-tool-calls-or-text/index.cjs +1 -0
  92. package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
  93. package/tool/generate-tool-calls-or-text/index.js +1 -0
  94. package/util/index.cjs +0 -1
  95. package/util/index.d.ts +0 -1
  96. package/util/index.js +0 -1
  97. package/browser/MediaSourceAppender.cjs +0 -54
  98. package/browser/MediaSourceAppender.d.ts +0 -11
  99. package/browser/MediaSourceAppender.js +0 -50
  100. package/browser/convertAudioChunksToBase64.cjs +0 -8
  101. package/browser/convertAudioChunksToBase64.d.ts +0 -4
  102. package/browser/convertAudioChunksToBase64.js +0 -4
  103. package/browser/convertBlobToBase64.cjs +0 -23
  104. package/browser/convertBlobToBase64.d.ts +0 -1
  105. package/browser/convertBlobToBase64.js +0 -19
  106. package/browser/index.cjs +0 -22
  107. package/browser/index.d.ts +0 -6
  108. package/browser/index.js +0 -6
  109. package/browser/invokeFlow.cjs +0 -23
  110. package/browser/invokeFlow.d.ts +0 -8
  111. package/browser/invokeFlow.js +0 -19
  112. package/browser/readEventSource.cjs +0 -29
  113. package/browser/readEventSource.d.ts +0 -9
  114. package/browser/readEventSource.js +0 -25
  115. package/browser/readEventSourceStream.cjs +0 -35
  116. package/browser/readEventSourceStream.d.ts +0 -7
  117. package/browser/readEventSourceStream.js +0 -31
  118. package/composed-function/index.cjs +0 -19
  119. package/composed-function/index.d.ts +0 -3
  120. package/composed-function/index.js +0 -3
  121. package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
  122. package/composed-function/summarize/summarizeRecursively.cjs +0 -19
  123. package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
  124. package/composed-function/summarize/summarizeRecursively.js +0 -15
  125. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
  126. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
  127. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
  128. package/cost/Cost.cjs +0 -38
  129. package/cost/Cost.d.ts +0 -16
  130. package/cost/Cost.js +0 -34
  131. package/cost/CostCalculator.d.ts +0 -8
  132. package/cost/calculateCost.cjs +0 -28
  133. package/cost/calculateCost.d.ts +0 -7
  134. package/cost/calculateCost.js +0 -24
  135. package/cost/index.cjs +0 -19
  136. package/cost/index.d.ts +0 -3
  137. package/cost/index.js +0 -3
  138. package/guard/GuardEvent.cjs +0 -2
  139. package/guard/GuardEvent.d.ts +0 -7
  140. package/guard/fixStructure.cjs +0 -75
  141. package/guard/fixStructure.d.ts +0 -64
  142. package/guard/fixStructure.js +0 -71
  143. package/guard/guard.cjs +0 -79
  144. package/guard/guard.d.ts +0 -29
  145. package/guard/guard.js +0 -75
  146. package/guard/index.cjs +0 -19
  147. package/guard/index.d.ts +0 -3
  148. package/guard/index.js +0 -3
  149. package/model-function/SuccessfulModelCall.cjs +0 -10
  150. package/model-function/SuccessfulModelCall.d.ts +0 -12
  151. package/model-function/SuccessfulModelCall.js +0 -6
  152. package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
  153. package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
  154. package/model-provider/openai/OpenAICostCalculator.js +0 -85
  155. package/server/fastify/AssetStorage.cjs +0 -2
  156. package/server/fastify/AssetStorage.d.ts +0 -17
  157. package/server/fastify/AssetStorage.js +0 -1
  158. package/server/fastify/DefaultFlow.cjs +0 -22
  159. package/server/fastify/DefaultFlow.d.ts +0 -16
  160. package/server/fastify/DefaultFlow.js +0 -18
  161. package/server/fastify/FileSystemAssetStorage.cjs +0 -60
  162. package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
  163. package/server/fastify/FileSystemAssetStorage.js +0 -56
  164. package/server/fastify/FileSystemLogger.cjs +0 -49
  165. package/server/fastify/FileSystemLogger.d.ts +0 -18
  166. package/server/fastify/FileSystemLogger.js +0 -45
  167. package/server/fastify/Flow.cjs +0 -2
  168. package/server/fastify/Flow.d.ts +0 -9
  169. package/server/fastify/Flow.js +0 -1
  170. package/server/fastify/FlowRun.cjs +0 -71
  171. package/server/fastify/FlowRun.d.ts +0 -28
  172. package/server/fastify/FlowRun.js +0 -67
  173. package/server/fastify/FlowSchema.cjs +0 -2
  174. package/server/fastify/FlowSchema.d.ts +0 -5
  175. package/server/fastify/FlowSchema.js +0 -1
  176. package/server/fastify/Logger.cjs +0 -2
  177. package/server/fastify/Logger.d.ts +0 -13
  178. package/server/fastify/Logger.js +0 -1
  179. package/server/fastify/PathProvider.cjs +0 -34
  180. package/server/fastify/PathProvider.d.ts +0 -12
  181. package/server/fastify/PathProvider.js +0 -30
  182. package/server/fastify/index.cjs +0 -24
  183. package/server/fastify/index.d.ts +0 -8
  184. package/server/fastify/index.js +0 -8
  185. package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
  186. package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
  187. package/server/fastify/modelFusionFlowPlugin.js +0 -99
  188. package/util/getAudioFileExtension.cjs +0 -29
  189. package/util/getAudioFileExtension.d.ts +0 -1
  190. package/util/getAudioFileExtension.js +0 -25
  191. /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
  192. /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
  193. /package/{cost/CostCalculator.js → model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.d.ts} +0 -0
  194. /package/{guard/GuardEvent.js → model-provider/llamacpp/LlamaCppCompletionModel.test.d.ts} +0 -0
  195. /package/model-provider/mistral/{MistralPromptTemplate.cjs → MistralChatPromptTemplate.cjs} +0 -0
  196. /package/model-provider/mistral/{MistralPromptTemplate.d.ts → MistralChatPromptTemplate.d.ts} +0 -0
  197. /package/model-provider/mistral/{MistralPromptTemplate.js → MistralChatPromptTemplate.js} +0 -0
  198. /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
  199. /package/{model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js} +0 -0
@@ -44,4 +44,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = chat().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -0,0 +1,150 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.validateMistralPrompt = exports.chat = exports.instruction = exports.text = void 0;
+const ContentPart_js_1 = require("./ContentPart.cjs");
+const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
+const BEGIN_SEGMENT = "<s>";
+const END_SEGMENT = "</s>";
+const BEGIN_INSTRUCTION = "[INST] ";
+const END_INSTRUCTION = " [/INST] ";
+/**
+ * Formats a text prompt as a Mistral instruct prompt.
+ *
+ * Mistral prompt template:
+ * ```
+ * <s>[INST] { instruction } [/INST]
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+function text() {
+    return {
+        stopSequences: [END_SEGMENT],
+        format(prompt) {
+            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}`;
+        },
+    };
+}
+exports.text = text;
+/**
+ * Formats an instruction prompt as a Mistral instruct prompt.
+ *
+ * Note that Mistral does not support system prompts. We emulate them.
+ *
+ * Mistral prompt template when system prompt is set:
+ * ```
+ * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+ * ```
+ *
+ * Mistral prompt template when there is no system prompt:
+ * ```
+ * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+function instruction() {
+    return {
+        stopSequences: [END_SEGMENT],
+        format(prompt) {
+            const instruction = (0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt);
+            if (prompt.system != null) {
+                return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system}${END_INSTRUCTION}${END_SEGMENT}${BEGIN_INSTRUCTION}${instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
+            }
+            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
+        },
+    };
+}
+exports.instruction = instruction;
+/**
+ * Formats a chat prompt as a Mistral instruct prompt.
+ *
+ * Note that Mistral does not support system prompts. We emulate them.
+ *
+ * Mistral prompt template when system prompt is set:
+ * ```
+ * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+ * ```
+ *
+ * Mistral prompt template when there is no system prompt:
+ * ```
+ * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+function chat() {
+    return {
+        format(prompt) {
+            validateMistralPrompt(prompt);
+            let text = "";
+            let i = 0;
+            // handle the special first segment
+            if (prompt.system != null) {
+                text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system}${END_INSTRUCTION}${END_SEGMENT}`;
+            }
+            else {
+                // get content of the first message (validated to be a user message)
+                text = `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.messages[0].content}${END_INSTRUCTION}`;
+                // process 2nd message (validated to be an assistant message)
+                if (prompt.messages.length > 1) {
+                    text += `${prompt.messages[1].content}${END_SEGMENT}`;
+                }
+                i = 2;
+            }
+            // process remaining messages
+            for (; i < prompt.messages.length; i++) {
+                const { role, content } = prompt.messages[i];
+                switch (role) {
+                    case "user": {
+                        const textContent = (0, ContentPart_js_1.validateContentIsString)(content, prompt);
+                        text += `${BEGIN_INSTRUCTION}${textContent}${END_INSTRUCTION}`;
+                        break;
+                    }
+                    case "assistant": {
+                        text += (0, ContentPart_js_1.validateContentIsString)(content, prompt);
+                        break;
+                    }
+                    case "tool": {
+                        throw new InvalidPromptError_js_1.InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
+            }
+            return text;
+        },
+        stopSequences: [END_SEGMENT],
+    };
+}
+exports.chat = chat;
+/**
+ * Checks if a Mistral chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
+ *
+ * - The first message of the chat must be a user message.
+ * - Then it must be alternating between an assistant message and a user message.
+ * - The last message must always be a user message (when submitting to a model).
+ *
+ * The type checking is done at runtime when you submit a chat prompt to a model with a prompt template.
+ *
+ * @throws {@link ChatPromptValidationError}
+ */
+function validateMistralPrompt(chatPrompt) {
+    const messages = chatPrompt.messages;
+    if (messages.length < 1) {
+        throw new InvalidPromptError_js_1.InvalidPromptError("ChatPrompt should have at least one message.", chatPrompt);
+    }
+    for (let i = 0; i < messages.length; i++) {
+        const expectedRole = i % 2 === 0 ? "user" : "assistant";
+        const role = messages[i].role;
+        if (role !== expectedRole) {
+            throw new InvalidPromptError_js_1.InvalidPromptError(`Message at index ${i} should have role '${expectedRole}', but has role '${role}'.`, chatPrompt);
+        }
+    }
+    if (messages.length % 2 === 0) {
+        throw new InvalidPromptError_js_1.InvalidPromptError("The last message must be a user message.", chatPrompt);
+    }
+}
+exports.validateMistralPrompt = validateMistralPrompt;
@@ -0,0 +1,62 @@
+import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
+import { ChatPrompt } from "./ChatPrompt.js";
+import { InstructionPrompt } from "./InstructionPrompt.js";
+/**
+ * Formats a text prompt as a Mistral instruct prompt.
+ *
+ * Mistral prompt template:
+ * ```
+ * <s>[INST] { instruction } [/INST]
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+export declare function text(): TextGenerationPromptTemplate<string, string>;
+/**
+ * Formats an instruction prompt as a Mistral instruct prompt.
+ *
+ * Note that Mistral does not support system prompts. We emulate them.
+ *
+ * Mistral prompt template when system prompt is set:
+ * ```
+ * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+ * ```
+ *
+ * Mistral prompt template when there is no system prompt:
+ * ```
+ * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt, string>;
+/**
+ * Formats a chat prompt as a Mistral instruct prompt.
+ *
+ * Note that Mistral does not support system prompts. We emulate them.
+ *
+ * Mistral prompt template when system prompt is set:
+ * ```
+ * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+ * ```
+ *
+ * Mistral prompt template when there is no system prompt:
+ * ```
+ * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+export declare function chat(): TextGenerationPromptTemplate<ChatPrompt, string>;
+/**
+ * Checks if a Mistral chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
+ *
+ * - The first message of the chat must be a user message.
+ * - Then it must be alternating between an assistant message and a user message.
+ * - The last message must always be a user message (when submitting to a model).
+ *
+ * The type checking is done at runtime when you submit a chat prompt to a model with a prompt template.
+ *
+ * @throws {@link ChatPromptValidationError}
+ */
+export declare function validateMistralPrompt(chatPrompt: ChatPrompt): void;
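
A minimal usage sketch of the template functions declared above, assuming the MistralInstructPrompt namespace is re-exported from the modelfusion package root like the other prompt-template namespaces (see the index changes further down); the result strings follow from the template constants in the implementation:

import { MistralInstructPrompt } from "modelfusion"; // assumed root re-export

// text(): wraps a plain string prompt in a single instruction segment.
MistralInstructPrompt.text().format("Hello.");
// => "<s>[INST] Hello. [/INST] "

// instruction(): an optional system prompt is emulated as its own closed segment
// in front of the instruction.
MistralInstructPrompt.instruction().format({
  system: "You are a helpful assistant.",
  instruction: "Summarize the text.",
});
// => "<s>[INST] You are a helpful assistant. [/INST] </s>[INST] Summarize the text. [/INST] "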
@@ -0,0 +1,143 @@
+import { validateContentIsString } from "./ContentPart.js";
+import { InvalidPromptError } from "./InvalidPromptError.js";
+const BEGIN_SEGMENT = "<s>";
+const END_SEGMENT = "</s>";
+const BEGIN_INSTRUCTION = "[INST] ";
+const END_INSTRUCTION = " [/INST] ";
+/**
+ * Formats a text prompt as a Mistral instruct prompt.
+ *
+ * Mistral prompt template:
+ * ```
+ * <s>[INST] { instruction } [/INST]
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+export function text() {
+    return {
+        stopSequences: [END_SEGMENT],
+        format(prompt) {
+            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}`;
+        },
+    };
+}
+/**
+ * Formats an instruction prompt as a Mistral instruct prompt.
+ *
+ * Note that Mistral does not support system prompts. We emulate them.
+ *
+ * Mistral prompt template when system prompt is set:
+ * ```
+ * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+ * ```
+ *
+ * Mistral prompt template when there is no system prompt:
+ * ```
+ * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+export function instruction() {
+    return {
+        stopSequences: [END_SEGMENT],
+        format(prompt) {
+            const instruction = validateContentIsString(prompt.instruction, prompt);
+            if (prompt.system != null) {
+                return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system}${END_INSTRUCTION}${END_SEGMENT}${BEGIN_INSTRUCTION}${instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
+            }
+            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
+        },
+    };
+}
+/**
+ * Formats a chat prompt as a Mistral instruct prompt.
+ *
+ * Note that Mistral does not support system prompts. We emulate them.
+ *
+ * Mistral prompt template when system prompt is set:
+ * ```
+ * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+ * ```
+ *
+ * Mistral prompt template when there is no system prompt:
+ * ```
+ * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+ * ```
+ *
+ * @see https://docs.mistral.ai/models/#chat-template
+ */
+export function chat() {
+    return {
+        format(prompt) {
+            validateMistralPrompt(prompt);
+            let text = "";
+            let i = 0;
+            // handle the special first segment
+            if (prompt.system != null) {
+                text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system}${END_INSTRUCTION}${END_SEGMENT}`;
+            }
+            else {
+                // get content of the first message (validated to be a user message)
+                text = `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.messages[0].content}${END_INSTRUCTION}`;
+                // process 2nd message (validated to be an assistant message)
+                if (prompt.messages.length > 1) {
+                    text += `${prompt.messages[1].content}${END_SEGMENT}`;
+                }
+                i = 2;
+            }
+            // process remaining messages
+            for (; i < prompt.messages.length; i++) {
+                const { role, content } = prompt.messages[i];
+                switch (role) {
+                    case "user": {
+                        const textContent = validateContentIsString(content, prompt);
+                        text += `${BEGIN_INSTRUCTION}${textContent}${END_INSTRUCTION}`;
+                        break;
+                    }
+                    case "assistant": {
+                        text += validateContentIsString(content, prompt);
+                        break;
+                    }
+                    case "tool": {
+                        throw new InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
+            }
+            return text;
+        },
+        stopSequences: [END_SEGMENT],
+    };
+}
+/**
+ * Checks if a Mistral chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
+ *
+ * - The first message of the chat must be a user message.
+ * - Then it must be alternating between an assistant message and a user message.
+ * - The last message must always be a user message (when submitting to a model).
+ *
+ * The type checking is done at runtime when you submit a chat prompt to a model with a prompt template.
+ *
+ * @throws {@link ChatPromptValidationError}
+ */
+export function validateMistralPrompt(chatPrompt) {
+    const messages = chatPrompt.messages;
+    if (messages.length < 1) {
+        throw new InvalidPromptError("ChatPrompt should have at least one message.", chatPrompt);
+    }
+    for (let i = 0; i < messages.length; i++) {
+        const expectedRole = i % 2 === 0 ? "user" : "assistant";
+        const role = messages[i].role;
+        if (role !== expectedRole) {
+            throw new InvalidPromptError(`Message at index ${i} should have role '${expectedRole}', but has role '${role}'.`, chatPrompt);
+        }
+    }
+    if (messages.length % 2 === 0) {
+        throw new InvalidPromptError("The last message must be a user message.", chatPrompt);
+    }
+}
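
A short sketch of the runtime behavior of chat() and validateMistralPrompt(), derived directly from the implementation above (the relative import assumes a module placed next to this file):

import { chat, validateMistralPrompt } from "./MistralInstructPromptTemplate.js";

// The validator enforces user/assistant alternation that starts and ends with a user message.
validateMistralPrompt({ messages: [{ role: "user", content: "Hi" }] }); // passes
// A leading or trailing assistant message throws an InvalidPromptError.

// A system prompt is emulated as its own closed <s>[INST] ... [/INST] </s> segment
// before the first user instruction:
chat().format({
  system: "you are a chatbot",
  messages: [{ role: "user", content: "Hi" }],
});
// => "<s>[INST] you are a chatbot [/INST] </s>[INST] Hi [/INST] "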
@@ -0,0 +1,60 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const MistralInstructPromptTemplate_js_1 = require("./MistralInstructPromptTemplate.cjs");
+describe("text prompt", () => {
+    it("should format prompt", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.text)().format("prompt");
+        expect(prompt).toMatchSnapshot();
+    });
+});
+describe("instruction prompt", () => {
+    it("should format prompt with instruction", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.instruction)().format({
+            instruction: "instruction",
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with system and instruction", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.instruction)().format({
+            system: "system",
+            instruction: "instruction",
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with instruction and response prefix", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.instruction)().format({
+            instruction: "instruction",
+            responsePrefix: "response prefix",
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+});
+describe("chat prompt", () => {
+    it("should format prompt with user message", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.chat)().format({
+            messages: [{ role: "user", content: "user message" }],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with user-assistant-user messages", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.chat)().format({
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = (0, MistralInstructPromptTemplate_js_1.chat)().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+});
@@ -0,0 +1,58 @@
+import { chat, instruction, text } from "./MistralInstructPromptTemplate.js";
+describe("text prompt", () => {
+    it("should format prompt", () => {
+        const prompt = text().format("prompt");
+        expect(prompt).toMatchSnapshot();
+    });
+});
+describe("instruction prompt", () => {
+    it("should format prompt with instruction", () => {
+        const prompt = instruction().format({
+            instruction: "instruction",
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with system and instruction", () => {
+        const prompt = instruction().format({
+            system: "system",
+            instruction: "instruction",
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with instruction and response prefix", () => {
+        const prompt = instruction().format({
+            instruction: "instruction",
+            responsePrefix: "response prefix",
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+});
+describe("chat prompt", () => {
+    it("should format prompt with user message", () => {
+        const prompt = chat().format({
+            messages: [{ role: "user", content: "user message" }],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with user-assistant-user messages", () => {
+        const prompt = chat().format({
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = chat().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
+});
@@ -46,4 +46,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = (0, NeuralChatPromptTemplate_js_1.chat)().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -44,4 +44,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = chat().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -46,4 +46,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = (0, TextPromptTemplate_js_1.chat)().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -44,4 +44,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = chat().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -18,4 +18,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = (0, VicunaPromptTemplate_js_1.chat)().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -16,4 +16,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = chat().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
@@ -26,7 +26,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.VicunaPrompt = exports.TextPrompt = exports.NeuralChatPrompt = exports.Llama2Prompt = exports.ChatMLPrompt = exports.AlpacaPrompt = void 0;
+exports.VicunaPrompt = exports.TextPrompt = exports.NeuralChatPrompt = exports.MistralInstructPrompt = exports.Llama2Prompt = exports.ChatMLPrompt = exports.AlpacaPrompt = void 0;
 exports.AlpacaPrompt = __importStar(require("./AlpacaPromptTemplate.cjs"));
 exports.ChatMLPrompt = __importStar(require("./ChatMLPromptTemplate.cjs"));
 __exportStar(require("./ChatPrompt.cjs"), exports);
@@ -34,6 +34,7 @@ __exportStar(require("./ContentPart.cjs"), exports);
 __exportStar(require("./InstructionPrompt.cjs"), exports);
 __exportStar(require("./InvalidPromptError.cjs"), exports);
 exports.Llama2Prompt = __importStar(require("./Llama2PromptTemplate.cjs"));
+exports.MistralInstructPrompt = __importStar(require("./MistralInstructPromptTemplate.cjs"));
 exports.NeuralChatPrompt = __importStar(require("./NeuralChatPromptTemplate.cjs"));
 exports.TextPrompt = __importStar(require("./TextPromptTemplate.cjs"));
 exports.VicunaPrompt = __importStar(require("./VicunaPromptTemplate.cjs"));
@@ -5,6 +5,7 @@ export * from "./ContentPart.js";
 export * from "./InstructionPrompt.js";
 export * from "./InvalidPromptError.js";
 export * as Llama2Prompt from "./Llama2PromptTemplate.js";
+export * as MistralInstructPrompt from "./MistralInstructPromptTemplate.js";
 export * as NeuralChatPrompt from "./NeuralChatPromptTemplate.js";
 export * as TextPrompt from "./TextPromptTemplate.js";
 export * as VicunaPrompt from "./VicunaPromptTemplate.js";
@@ -5,6 +5,7 @@ export * from "./ContentPart.js";
 export * from "./InstructionPrompt.js";
 export * from "./InvalidPromptError.js";
 export * as Llama2Prompt from "./Llama2PromptTemplate.js";
+export * as MistralInstructPrompt from "./MistralInstructPromptTemplate.js";
 export * as NeuralChatPrompt from "./NeuralChatPromptTemplate.js";
 export * as TextPrompt from "./TextPromptTemplate.js";
 export * as VicunaPrompt from "./VicunaPromptTemplate.js";
@@ -20,7 +20,6 @@ __exportStar(require("./ModelCallEvent.cjs"), exports);
 __exportStar(require("./ModelCallMetadata.cjs"), exports);
 __exportStar(require("./ModelInformation.cjs"), exports);
 __exportStar(require("./PromptTemplate.cjs"), exports);
-__exportStar(require("./SuccessfulModelCall.cjs"), exports);
 __exportStar(require("./embed/EmbeddingEvent.cjs"), exports);
 __exportStar(require("./embed/EmbeddingModel.cjs"), exports);
 __exportStar(require("./embed/embed.cjs"), exports);
@@ -4,7 +4,6 @@ export * from "./ModelCallEvent.js";
 export * from "./ModelCallMetadata.js";
 export * from "./ModelInformation.js";
 export * from "./PromptTemplate.js";
-export * from "./SuccessfulModelCall.js";
 export * from "./embed/EmbeddingEvent.js";
 export * from "./embed/EmbeddingModel.js";
 export * from "./embed/embed.js";
@@ -4,7 +4,6 @@ export * from "./ModelCallEvent.js";
 export * from "./ModelCallMetadata.js";
 export * from "./ModelInformation.js";
 export * from "./PromptTemplate.js";
-export * from "./SuccessfulModelCall.js";
 export * from "./embed/EmbeddingEvent.js";
 export * from "./embed/EmbeddingModel.js";
 export * from "./embed/embed.js";