modelfusion 0.103.0 → 0.105.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +56 -0
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -1
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +2 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +9 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +9 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +38 -20
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +33 -34
- package/model-function/generate-text/prompt-template/ChatPrompt.js +37 -18
- package/model-function/generate-text/prompt-template/ContentPart.cjs +11 -0
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/ContentPart.js +7 -0
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +7 -22
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +40 -6
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +16 -4
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +38 -5
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +10 -5
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +10 -5
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/TextPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +8 -5
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +2 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +8 -4
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/prompt-template/trimChatPrompt.cjs +0 -2
- package/model-function/generate-text/prompt-template/trimChatPrompt.d.ts +4 -4
- package/model-function/generate-text/prompt-template/trimChatPrompt.js +0 -2
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +9 -4
- package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +4 -4
- package/model-provider/anthropic/AnthropicPromptTemplate.js +9 -4
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +13 -4
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +47 -13
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +29 -17
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -4
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +29 -17
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +157 -6
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts +1 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +49 -16
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.d.ts +1 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +13 -5
- package/model-provider/mistral/MistralPromptTemplate.d.ts +4 -4
- package/model-provider/mistral/MistralPromptTemplate.js +13 -5
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +63 -11
- package/model-provider/ollama/OllamaChatModel.js +7 -43
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.d.ts +1 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +43 -17
- package/model-provider/ollama/OllamaChatPromptTemplate.d.ts +4 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +43 -17
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +65 -9
- package/model-provider/ollama/OllamaCompletionModel.js +23 -44
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +101 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +78 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.d.ts +1 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.d.ts +20 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +29 -12
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.d.ts +1 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAICostCalculator.cjs +1 -1
- package/model-provider/openai/OpenAICostCalculator.js +1 -1
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +6 -6
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -6
- package/model-provider/openai/index.d.ts +5 -6
- package/model-provider/openai/index.js +5 -5
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
- package/package.json +5 -5
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/model-function/generate-text/prompt-template/Content.cjs +0 -2
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -20
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -72
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +0 -20
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -65
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- /package/{model-function/generate-text/prompt-template/Content.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0

package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts

@@ -1,5 +1,5 @@
 import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
-import {
+import { InstructionPrompt } from "./InstructionPrompt.js";
 /**
  * Formats a text prompt as an Alpaca prompt.
  */
@@ -40,7 +40,7 @@ export declare function text(): TextGenerationPromptTemplate<string, string>;
  *
  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
  */
-export declare function instruction(): TextGenerationPromptTemplate<
+export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt & {
     input?: string;
 }, // optional input supported by Alpaca
 string>;
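
For orientation, a minimal usage sketch of the updated Alpaca `instruction()` template (importing from the module path above; whether it is re-exported from the package root is not shown in this diff):

```ts
import { instruction } from "./AlpacaPromptTemplate.js";

// The template now accepts an InstructionPrompt plus the Alpaca-specific
// optional `input`, which is appended under an "### Input:" section
// (see the .js hunk below).
const text = instruction().format({
  instruction: "Translate the sentence into French.",
  input: "I love programming.",
});
```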

package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js

@@ -1,3 +1,4 @@
+import { validateContentIsString } from "./ContentPart.js";
 const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
 const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
 /**
@@ -63,7 +64,7 @@ export function instruction() {
             if (prompt.system != null) {
                 text += `${prompt.system}\n`;
             }
-            text += prompt.instruction;
+            text += validateContentIsString(prompt.instruction, prompt);
             if (prompt.input != null) {
                 text += `\n\n### Input:\n${prompt.input}`;
             }

package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs

@@ -1,7 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat = exports.instruction = exports.text = void 0;
-const
+const ContentPart_js_1 = require("./ContentPart.cjs");
+const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
 const START_SEGMENT = "<|im_start|>";
 const END_SEGMENT = "<|im_end|>";
 function segmentStart(role) {
@@ -40,8 +41,9 @@ function instruction() {
     return {
         stopSequences: [END_SEGMENT],
         format(prompt) {
+            const instruction = (0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt);
             return (segment("system", prompt.system) +
-                segment("user",
+                segment("user", instruction) +
                 segmentStart("assistant") +
                 (prompt.responsePrefix ?? ""));
         },
@@ -64,18 +66,20 @@ exports.instruction = instruction;
 function chat() {
     return {
         format(prompt) {
-            (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
             let text = prompt.system != null ? segment("system", prompt.system) : "";
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
-                        text += segment("user", content);
+                        text += segment("user", (0, ContentPart_js_1.validateContentIsString)(content, prompt));
                         break;
                     }
                     case "assistant": {
-                        text += segment("assistant", content);
+                        text += segment("assistant", (0, ContentPart_js_1.validateContentIsString)(content, prompt));
                         break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError_js_1.InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);

package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.d.ts

@@ -1,6 +1,6 @@
 import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
-import {
-import {
+import { ChatPrompt } from "./ChatPrompt.js";
+import { InstructionPrompt } from "./InstructionPrompt.js";
 /**
  * Formats a text prompt using the ChatML format.
  */
@@ -18,7 +18,7 @@ export declare function text(): TextGenerationPromptTemplate<string, string>;
  * ${response prefix}
  * ```
  */
-export declare function instruction(): TextGenerationPromptTemplate<
+export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt, string>;
 /**
  * Formats a chat prompt using the ChatML format.
  *
@@ -32,4 +32,4 @@ export declare function instruction(): TextGenerationPromptTemplate<TextInstruct
  * Paris<|im_end|>
  * ```
  */
-export declare function chat(): TextGenerationPromptTemplate<
+export declare function chat(): TextGenerationPromptTemplate<ChatPrompt, string>;
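
A sketch of the narrowed ChatML signatures: `instruction()` now takes the shared `InstructionPrompt` and `chat()` the shared `ChatPrompt` (direct module imports for illustration):

```ts
import { chat } from "./ChatMLPromptTemplate.js";
import { ChatMessage } from "./ChatPrompt.js";

const text = chat().format({
  system: "You are a helpful assistant.",
  messages: [ChatMessage.user({ text: "What is the capital of France?" })],
});
// Produces a ChatML string along the lines of (segment internals are not shown in this diff):
// <|im_start|>system
// You are a helpful assistant.<|im_end|>
// <|im_start|>user
// What is the capital of France?<|im_end|>
// <|im_start|>assistant
```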

package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js

@@ -1,4 +1,5 @@
-import {
+import { validateContentIsString } from "./ContentPart.js";
+import { InvalidPromptError } from "./InvalidPromptError.js";
 const START_SEGMENT = "<|im_start|>";
 const END_SEGMENT = "<|im_end|>";
 function segmentStart(role) {
@@ -36,8 +37,9 @@ export function instruction() {
     return {
         stopSequences: [END_SEGMENT],
         format(prompt) {
+            const instruction = validateContentIsString(prompt.instruction, prompt);
             return (segment("system", prompt.system) +
-                segment("user",
+                segment("user", instruction) +
                 segmentStart("assistant") +
                 (prompt.responsePrefix ?? ""));
         },
@@ -59,18 +61,20 @@ export function instruction() {
 export function chat() {
     return {
         format(prompt) {
-            validateChatPrompt(prompt);
             let text = prompt.system != null ? segment("system", prompt.system) : "";
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
-                        text += segment("user", content);
+                        text += segment("user", validateContentIsString(content, prompt));
                         break;
                     }
                     case "assistant": {
-                        text += segment("assistant", content);
+                        text += segment("assistant", validateContentIsString(content, prompt));
                        break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);

package/model-function/generate-text/prompt-template/ChatPrompt.cjs

@@ -1,26 +1,44 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-
-
-
-
-
-
-
-
-
+exports.ChatMessage = void 0;
+exports.ChatMessage = {
+    user({ text }) {
+        return {
+            role: "user",
+            content: text,
+        };
+    },
+    tool({ toolResults, }) {
+        return {
+            role: "tool",
+            content: createToolContent({ toolResults }),
+        };
+    },
+    assistant({ text, toolResults, }) {
+        return {
+            role: "assistant",
+            content: createAssistantContent({ text, toolResults }),
+        };
+    },
+};
+function createToolContent({ toolResults, }) {
+    const toolContent = [];
+    for (const { result, toolCall } of toolResults ?? []) {
+        toolContent.push({
+            type: "tool-response",
+            id: toolCall.id,
+            response: result,
+        });
     }
-
-
-
-
-
-}
+    return toolContent;
+}
+function createAssistantContent({ text, toolResults, }) {
+    const content = [];
+    if (text != null) {
+        content.push({ type: "text", text });
     }
-
-
+    for (const { toolCall } of toolResults ?? []) {
+        content.push({ type: "tool-call", ...toolCall });
     }
+    return content;
 }
-exports.validateChatPrompt = validateChatPrompt;

package/model-function/generate-text/prompt-template/ChatPrompt.d.ts

@@ -1,15 +1,14 @@
-import { MultiModalInput } from "./Content.js";
+import { ToolCallResult } from "../../../tool/ToolCallResult.js";
+import { ImagePart, TextPart, ToolCallPart, ToolResponsePart } from "./ContentPart.js";
 /**
- * A
+ * A chat prompt is a combination of a system message and a list
+ * of user, assistant, and tool messages.
  *
- *
- *
- * - Then it must be alternating between an assistant message and a user message.
- * - The last message must always be a user message (when submitting to a model).
+ * The user messages can contain multi-modal content.
+ * The assistant messages can contain tool calls.
  *
- *
- *
- * The type checking is done at runtime when you submit a chat prompt to a model with a prompt template.
+ * Note: Not all models and prompt formats support multi-modal inputs and tool calls.
+ * The validation happens at runtime.
  *
  * @example
  * ```ts
@@ -22,38 +21,38 @@ import { MultiModalInput } from "./Content.js";
  * ],
  * };
  * ```
- *
- * @see validateChatPrompt
  */
-export interface
+export interface ChatPrompt {
     system?: string;
-    messages: Array<
+    messages: Array<ChatMessage>;
 }
+export type UserContent = string | Array<TextPart | ImagePart>;
+export type AssistantContent = string | Array<TextPart | ToolCallPart>;
+export type ToolContent = Array<ToolResponsePart>;
 /**
- * A
- *
+ * A message in a chat prompt.
+ *
+ * @see ChatPrompt
  */
-export type
+export type ChatMessage = {
     role: "user";
-    content:
+    content: UserContent;
 } | {
     role: "assistant";
-    content:
-};
-export interface MultiModalChatPrompt {
-    system?: string;
-    messages: Array<MultiModalChatMessage>;
-}
-export type MultiModalChatMessage = {
-    role: "user";
-    content: MultiModalInput;
+    content: AssistantContent;
 } | {
-    role: "
-    content:
+    role: "tool";
+    content: ToolContent;
+};
+export declare const ChatMessage: {
+    user({ text }: {
+        text: string;
+    }): ChatMessage;
+    tool({ toolResults, }: {
+        toolResults: ToolCallResult<string, unknown, unknown>[] | null;
+    }): ChatMessage;
+    assistant({ text, toolResults, }: {
+        text: string | null;
+        toolResults: ToolCallResult<string, unknown, unknown>[] | null;
+    }): ChatMessage;
 };
-/**
- * Checks if a chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
- *
- * @throws {@link ChatPromptValidationError}
- */
-export declare function validateChatPrompt(chatPrompt: TextChatPrompt | MultiModalChatPrompt): void;
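
The net effect of this file: the former text/multi-modal prompt split collapses into a single `ChatPrompt` type plus a `ChatMessage` helper object. A minimal sketch based on the declarations above (direct module import for illustration):

```ts
import { ChatMessage, ChatPrompt } from "./ChatPrompt.js";

const prompt: ChatPrompt = {
  system: "You are a helpful assistant.",
  messages: [
    // User content may be a plain string or an array of text/image parts.
    ChatMessage.user({ text: "What is the capital of France?" }),
    // The assistant helper requires toolResults; pass null for plain text.
    ChatMessage.assistant({ text: "Paris.", toolResults: null }),
    ChatMessage.user({ text: "And of Germany?" }),
  ],
};
```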

package/model-function/generate-text/prompt-template/ChatPrompt.js

@@ -1,22 +1,41 @@
-
-
-
-
-
-
-
-
-
-
+export const ChatMessage = {
+    user({ text }) {
+        return {
+            role: "user",
+            content: text,
+        };
+    },
+    tool({ toolResults, }) {
+        return {
+            role: "tool",
+            content: createToolContent({ toolResults }),
+        };
+    },
+    assistant({ text, toolResults, }) {
+        return {
+            role: "assistant",
+            content: createAssistantContent({ text, toolResults }),
+        };
+    },
+};
+function createToolContent({ toolResults, }) {
+    const toolContent = [];
+    for (const { result, toolCall } of toolResults ?? []) {
+        toolContent.push({
+            type: "tool-response",
+            id: toolCall.id,
+            response: result,
+        });
     }
-
-
-
-
-
-}
+    return toolContent;
+}
+function createAssistantContent({ text, toolResults, }) {
+    const content = [];
+    if (text != null) {
+        content.push({ type: "text", text });
     }
-
-
+    for (const { toolCall } of toolResults ?? []) {
+        content.push({ type: "tool-call", ...toolCall });
     }
+    return content;
 }
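
To make the helper behavior concrete: `assistant()` folds the text and each tool call into a single parts array, and `tool()` turns each tool result into a `tool-response` part. A sketch of the resulting shapes (any `ToolCallResult` fields beyond `toolCall` and `result` are not shown in this diff, so the literal below is an assumption):

```ts
import { ChatMessage } from "./ChatPrompt.js";

const toolResults = [
  {
    toolCall: { id: "call-1", name: "weather", args: { city: "Paris" } },
    result: { temperatureCelsius: 21 },
  },
];

ChatMessage.assistant({ text: "Checking the weather.", toolResults });
// content: [ { type: "text", text: "Checking the weather." },
//            { type: "tool-call", id: "call-1", name: "weather", args: { city: "Paris" } } ]

ChatMessage.tool({ toolResults });
// content: [ { type: "tool-response", id: "call-1", response: { temperatureCelsius: 21 } } ]
```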

package/model-function/generate-text/prompt-template/ContentPart.cjs

@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.validateContentIsString = void 0;
+const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
+function validateContentIsString(content, prompt) {
+    if (typeof content !== "string") {
+        throw new InvalidPromptError_js_1.InvalidPromptError("Only text prompts are are supported by this prompt template.", prompt);
+    }
+    return content;
+}
+exports.validateContentIsString = validateContentIsString;

package/model-function/generate-text/prompt-template/ContentPart.d.ts

@@ -0,0 +1,30 @@
+export interface TextPart {
+    type: "text";
+    /**
+     * The text content.
+     */
+    text: string;
+}
+export interface ImagePart {
+    type: "image";
+    /**
+     * Base-64 encoded image.
+     */
+    base64Image: string;
+    /**
+     * Optional mime type of the image.
+     */
+    mimeType?: string;
+}
+export interface ToolCallPart {
+    type: "tool-call";
+    id: string;
+    name: string;
+    args: unknown;
+}
+export interface ToolResponsePart {
+    type: "tool-response";
+    id: string;
+    response: unknown;
+}
+export declare function validateContentIsString(content: string | unknown, prompt: unknown): string;
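
These part types are the shared vocabulary for multi-modal and tool content, and `validateContentIsString` is the runtime guard that text-only templates now use. A minimal sketch:

```ts
import { ImagePart, TextPart, validateContentIsString } from "./ContentPart.js";

const imageContent: Array<TextPart | ImagePart> = [
  { type: "text", text: "Describe this image." },
  { type: "image", base64Image: "...", mimeType: "image/png" },
];
const prompt = { instruction: imageContent };

validateContentIsString("plain text", prompt); // passes strings through unchanged
validateContentIsString(imageContent, prompt); // throws InvalidPromptError
```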

package/model-function/generate-text/prompt-template/ContentPart.js

@@ -0,0 +1,7 @@
+import { InvalidPromptError } from "./InvalidPromptError.js";
+export function validateContentIsString(content, prompt) {
+    if (typeof content !== "string") {
+        throw new InvalidPromptError("Only text prompts are are supported by this prompt template.", prompt);
+    }
+    return content;
+}

package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts

@@ -1,24 +1,10 @@
-import {
-/**
- * A single multi-modal instruction prompt. It can contain an optional system message to define
- * the role and behavior of the language model.
- * The instruction is a multi-model input (`array` of content).
- */
-export interface MultiModalInstructionPrompt {
-    /**
-     * Optional system message to provide context for the language model. Note that for some models,
-     * changing the system message can impact the results, because the model may be trained on the default system message.
-     */
-    system?: string;
-    /**
-     * The multi-modal instruction for the model.
-     */
-    instruction: MultiModalInput;
-}
+import { ImagePart, TextPart } from "./ContentPart";
 /**
  * A single text instruction prompt. It can contain an optional system message to define
  * the role and behavior of the language model.
  *
+ * The instruction can be a text instruction or a multi-modal instruction.
+ *
  * @example
  * ```ts
  * {
@@ -27,21 +13,20 @@ export interface MultiModalInstructionPrompt {
  * }
  * ```
  */
-export interface
+export interface InstructionPrompt {
     /**
      * Optional system message to provide context for the language model. Note that for some models,
      * changing the system message can impact the results, because the model may be trained on the default system message.
      */
     system?: string;
     /**
-     * The
+     * The instruction for the model.
      */
-    instruction:
+    instruction: InstructionContent;
     /**
      * Response prefix that will be injected in the prompt at the beginning of the response.
      * This is useful for guiding the model by starting its response with a specific text.
-     *
-     * Note: Not all models support this feature. E.g. it is not supported by OpenAI chat models.
      */
     responsePrefix?: string;
 }
+export type InstructionContent = string | Array<TextPart | ImagePart>;
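
The unified `InstructionPrompt` replaces the separate text and multi-modal interfaces; whether a given template accepts the array form is now checked at runtime. A sketch of both shapes:

```ts
import { InstructionPrompt } from "./InstructionPrompt.js";

// Text-only instruction, accepted by every template:
const textPrompt: InstructionPrompt = {
  system: "You are a concise assistant.",
  instruction: "Summarize the report in three sentences.",
  responsePrefix: "Summary:",
};

// Multi-modal instruction, rejected by text-only templates via validateContentIsString:
const imagePrompt: InstructionPrompt = {
  instruction: [
    { type: "text", text: "Describe this image." },
    { type: "image", base64Image: "...", mimeType: "image/jpeg" },
  ],
};
```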

package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs

@@ -1,7 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.chat = exports.instruction = exports.text = void 0;
-const
+exports.validateLlama2Prompt = exports.chat = exports.instruction = exports.text = void 0;
+const ContentPart_js_1 = require("./ContentPart.cjs");
+const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
 // see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
 const BEGIN_SEGMENT = "<s>";
 const END_SEGMENT = " </s>";
@@ -47,9 +48,10 @@ function instruction() {
     return {
         stopSequences: [END_SEGMENT],
         format(prompt) {
+            const instruction = (0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt);
             return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
                 ? `${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
-                : ""}${
+                : ""}${instruction}${END_INSTRUCTION}${prompt.responsePrefix ?? ""}`;
         },
     };
 }
@@ -69,7 +71,7 @@ exports.instruction = instruction;
 function chat() {
     return {
         format(prompt) {
-            (
+            validateLlama2Prompt(prompt);
             let text = prompt.system != null
                 ? // Separate section for system message to simplify implementation
                 // (this is slightly different from the original instructions):
@@ -78,13 +80,17 @@ function chat() {
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
-
+                        const textContent = (0, ContentPart_js_1.validateContentIsString)(content, prompt);
+                        text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${textContent}${END_INSTRUCTION}`;
                         break;
                     }
                     case "assistant": {
-                        text += `${content}${END_SEGMENT}`;
+                        text += `${(0, ContentPart_js_1.validateContentIsString)(content, prompt)}${END_SEGMENT}`;
                         break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError_js_1.InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
@@ -97,3 +103,31 @@ function chat() {
     };
 }
 exports.chat = chat;
+/**
+ * Checks if a Llama2 chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
+ *
+ * - The first message of the chat must be a user message.
+ * - Then it must be alternating between an assistant message and a user message.
+ * - The last message must always be a user message (when submitting to a model).
+ *
+ * The type checking is done at runtime when you submit a chat prompt to a model with a prompt template.
+ *
+ * @throws {@link ChatPromptValidationError}
+ */
+function validateLlama2Prompt(chatPrompt) {
+    const messages = chatPrompt.messages;
+    if (messages.length < 1) {
+        throw new InvalidPromptError_js_1.InvalidPromptError("ChatPrompt should have at least one message.", chatPrompt);
+    }
+    for (let i = 0; i < messages.length; i++) {
+        const expectedRole = i % 2 === 0 ? "user" : "assistant";
+        const role = messages[i].role;
+        if (role !== expectedRole) {
+            throw new InvalidPromptError_js_1.InvalidPromptError(`Message at index ${i} should have role '${expectedRole}', but has role '${role}'.`, chatPrompt);
+        }
+    }
+    if (messages.length % 2 === 0) {
+        throw new InvalidPromptError_js_1.InvalidPromptError("The last message must be a user message.", chatPrompt);
+    }
+}
+exports.validateLlama2Prompt = validateLlama2Prompt;
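
The alternation rule that previously lived in the generic `validateChatPrompt` is now Llama2-specific. To illustrate what it enforces:

```ts
import { validateLlama2Prompt } from "./Llama2PromptTemplate.js";

// Valid: strict user/assistant alternation ending on a user message.
validateLlama2Prompt({
  messages: [
    { role: "user", content: "Hi" },
    { role: "assistant", content: "Hello!" },
    { role: "user", content: "Tell me a joke." },
  ],
});

// Invalid: an even number of messages means the last one is not a user message.
validateLlama2Prompt({
  messages: [
    { role: "user", content: "Hi" },
    { role: "assistant", content: "Hello!" },
  ],
}); // throws InvalidPromptError: "The last message must be a user message."
```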

package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts

@@ -1,6 +1,6 @@
 import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
-import {
-import {
+import { ChatPrompt } from "./ChatPrompt.js";
+import { InstructionPrompt } from "./InstructionPrompt.js";
 /**
  * Formats a text prompt as a Llama 2 prompt.
  *
@@ -27,7 +27,7 @@ export declare function text(): TextGenerationPromptTemplate<string, string>;
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
  */
-export declare function instruction(): TextGenerationPromptTemplate<
+export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt, string>;
 /**
  * Formats a chat prompt as a Llama 2 prompt.
  *
@@ -40,4 +40,16 @@ export declare function instruction(): TextGenerationPromptTemplate<TextInstruct
  * ${ user msg 1 } [/INST] ${ model response 1 } </s><s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } </s><s>[INST] ${ user msg 3 } [/INST]
  * ```
  */
-export declare function chat(): TextGenerationPromptTemplate<
+export declare function chat(): TextGenerationPromptTemplate<ChatPrompt, string>;
+/**
+ * Checks if a Llama2 chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
+ *
+ * - The first message of the chat must be a user message.
+ * - Then it must be alternating between an assistant message and a user message.
+ * - The last message must always be a user message (when submitting to a model).
+ *
+ * The type checking is done at runtime when you submit a chat prompt to a model with a prompt template.
+ *
+ * @throws {@link ChatPromptValidationError}
+ */
+export declare function validateLlama2Prompt(chatPrompt: ChatPrompt): void;
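
Putting it together, a sketch of formatting a chat prompt with the Llama 2 template (the `[INST]` segments follow the doc comment above; the exact system-section formatting is not shown in this diff, so the output comment is approximate):

```ts
import { ChatMessage } from "./ChatPrompt.js";
import { chat } from "./Llama2PromptTemplate.js";

const text = chat().format({
  system: "You are a pirate.",
  messages: [ChatMessage.user({ text: "Hello" })],
});
// roughly: "<s>[INST] ...system section... Hello [/INST]"
```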