modelfusion 0.87.1 → 0.88.0
- package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +1 -1
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +23 -8
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/trimChatPrompt.d.ts +4 -4
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +2 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.cjs +50 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +3 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.js +47 -0
- package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +1 -1
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +2 -2
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +1 -1
- package/package.json +1 -1

package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts
@@ -1,6 +1,6 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { StructureFromTextGenerationModel } from "../../model-function/generate-structure/StructureFromTextGenerationModel.js";
-import { StructureFromTextPromptFormat } from "model-function/generate-structure/StructureFromTextPromptFormat.js";
+import { StructureFromTextPromptFormat } from "../../model-function/generate-structure/StructureFromTextPromptFormat.js";
 import { TextGenerationToolCallModel, ToolCallPromptFormat } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
 import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptFormat } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";

package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts
@@ -1,5 +1,5 @@
 import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
-import { ChatPrompt } from "./ChatPrompt.js";
+import { TextChatPrompt } from "./ChatPrompt.js";
 import { TextInstructionPrompt } from "./InstructionPrompt.js";
 /**
  * Formats a text prompt using the ChatML format.
@@ -32,4 +32,4 @@ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
  * Paris<|im_end|>
  * ```
  */
-export declare function chat(): TextGenerationPromptFormat<ChatPrompt, string>;
+export declare function chat(): TextGenerationPromptFormat<TextChatPrompt, string>;

package/model-function/generate-text/prompt-format/ChatPrompt.d.ts
@@ -1,5 +1,6 @@
+import { MultiModalInput } from "./Content.js";
 /**
- * A chat prompt is a combination of a system message and a list of messages with the following constraints:
+ * A textual chat prompt is a combination of a system message and a list of messages with the following constraints:
  *
  * - A chat prompt can optionally have a system message.
  * - The first message of the chat must be a user message.
@@ -24,21 +25,35 @@
  *
  * @see validateChatPrompt
  */
-export interface ChatPrompt {
+export interface TextChatPrompt {
     system?: string;
-    messages: Array<ChatMessage>;
+    messages: Array<TextChatMessage>;
 }
 /**
- * A message in a chat prompt.
- * @see ChatPrompt
+ * A text message in a chat prompt.
+ * @see TextChatPrompt
  */
-export interface ChatMessage {
-    role: "user" | "assistant";
+export type TextChatMessage = {
+    role: "user";
     content: string;
+} | {
+    role: "assistant";
+    content: string;
+};
+export interface MultiModalChatPrompt {
+    system?: string;
+    messages: Array<MultiModalChatMessage>;
 }
+export type MultiModalChatMessage = {
+    role: "user";
+    content: MultiModalInput;
+} | {
+    role: "assistant";
+    content: string;
+};
 /**
  * Checks if a chat prompt is valid. Throws a {@link ChatPromptValidationError} if it's not.
  *
  * @throws {@link ChatPromptValidationError}
  */
-export declare function validateChatPrompt(chatPrompt: ChatPrompt): void;
+export declare function validateChatPrompt(chatPrompt: TextChatPrompt | MultiModalChatPrompt): void;
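
The split of the old ChatPrompt/ChatMessage pair into TextChatPrompt and the new MultiModalChatPrompt is the core change of this release; the rest of the diff follows from it. A minimal TypeScript sketch of the two shapes, assuming both types are re-exported from the package index (the withChatPrompt() signatures below reference them via import("../../index.js")) and that MultiModalInput is an array of text/image parts with a base64Image field, which is how the BakLLaVA formatter in this diff consumes it:

    import { TextChatPrompt, MultiModalChatPrompt } from "modelfusion";

    // Text chat: content is always a string; roles alternate user/assistant.
    const textChat: TextChatPrompt = {
      system: "You are a helpful assistant.",
      messages: [
        { role: "user", content: "Where is the Eiffel Tower?" },
        { role: "assistant", content: "In Paris." },
        { role: "user", content: "How tall is it?" },
      ],
    };

    // Multi-modal chat: user content is MultiModalInput (text and image parts);
    // assistant content stays a plain string.
    const multiModalChat: MultiModalChatPrompt = {
      messages: [
        {
          role: "user",
          content: [
            { type: "text", text: "Describe this image:" },
            { type: "image", base64Image: "...base64-encoded image data..." },
          ],
        },
      ],
    };

validateChatPrompt accepts the union of both shapes, so the alternating user/assistant constraint documented above applies to either flavor.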

package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts
@@ -1,5 +1,5 @@
 import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
-import { ChatPrompt } from "./ChatPrompt.js";
+import { TextChatPrompt } from "./ChatPrompt.js";
 import { TextInstructionPrompt } from "./InstructionPrompt.js";
 /**
  * Formats a text prompt as a Llama 2 prompt.
@@ -39,4 +39,4 @@ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
  * ${ user msg 1 } [/INST] ${ model response 1 } </s><s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } </s><s>[INST] ${ user msg 3 } [/INST]
  * ```
  */
-export declare function chat(): TextGenerationPromptFormat<ChatPrompt, string>;
+export declare function chat(): TextGenerationPromptFormat<TextChatPrompt, string>;

package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts
@@ -1,5 +1,5 @@
 import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
-import { ChatPrompt } from "./ChatPrompt.js";
+import { TextChatPrompt } from "./ChatPrompt.js";
 import { TextInstructionPrompt } from "./InstructionPrompt.js";
 /**
  * Formats a text prompt as a basic text prompt. Does not change the text prompt in any way.
@@ -20,4 +20,4 @@ export declare const chat: (options?: {
     user?: string;
     assistant?: string;
     system?: string;
-}) => TextGenerationPromptFormat<ChatPrompt, string>;
+}) => TextGenerationPromptFormat<TextChatPrompt, string>;

package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts
@@ -1,5 +1,5 @@
 import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
-import { ChatPrompt } from "./ChatPrompt.js";
+import { TextChatPrompt } from "./ChatPrompt.js";
 /**
  * Formats a chat prompt as a Vicuna prompt.
  *
@@ -13,4 +13,4 @@ import { ChatPrompt } from "./ChatPrompt.js";
  * ASSISTANT:
  * ```
  */
-export declare function chat(): TextGenerationPromptFormat<ChatPrompt, string>;
+export declare function chat(): TextGenerationPromptFormat<TextChatPrompt, string>;

package/model-function/generate-text/prompt-format/trimChatPrompt.d.ts
@@ -1,5 +1,5 @@
 import { HasContextWindowSize, HasTokenizer, TextGenerationModel, TextGenerationModelSettings } from "../TextGenerationModel.js";
-import { ChatPrompt } from "./ChatPrompt.js";
+import { TextChatPrompt } from "./ChatPrompt.js";
 /**
  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
  *
@@ -11,7 +11,7 @@ import { ChatPrompt } from "./ChatPrompt.js";
  * @see https://modelfusion.dev/guide/function/generate-text#limiting-the-chat-length
  */
 export declare function trimChatPrompt({ prompt, model, tokenLimit, }: {
-    prompt: ChatPrompt;
-    model: TextGenerationModel<ChatPrompt, TextGenerationModelSettings> & HasTokenizer<ChatPrompt> & HasContextWindowSize;
+    prompt: TextChatPrompt;
+    model: TextGenerationModel<TextChatPrompt, TextGenerationModelSettings> & HasTokenizer<TextChatPrompt> & HasContextWindowSize;
     tokenLimit?: number;
-}): Promise<ChatPrompt>;
+}): Promise<TextChatPrompt>;
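
trimChatPrompt is now pinned to TextChatPrompt end to end. A hedged usage sketch under two assumptions: that the constraint types (TextGenerationModel, HasTokenizer, HasContextWindowSize) are re-exported from the package index like the prompt types, and that any concrete chat-capable model with a tokenizer and a known context window can stand in for the declared placeholder:

    import {
      trimChatPrompt,
      TextChatPrompt,
      TextGenerationModel,
      TextGenerationModelSettings,
      HasTokenizer,
      HasContextWindowSize,
    } from "modelfusion";

    // Placeholder: any model satisfying the constraints in the declaration above.
    declare const model: TextGenerationModel<TextChatPrompt, TextGenerationModelSettings> &
      HasTokenizer<TextChatPrompt> &
      HasContextWindowSize;

    async function keepPromptWithinContext(prompt: TextChatPrompt): Promise<TextChatPrompt> {
      // Keeps only the most recent messages that still fit,
      // leaving space for the completion; tokenLimit is optional.
      return trimChatPrompt({ prompt, model });
    }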

package/model-provider/anthropic/AnthropicPromptFormat.d.ts
@@ -1,5 +1,5 @@
 import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
-import { ChatPrompt } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
+import { TextChatPrompt } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
 import { TextInstructionPrompt } from "../../model-function/generate-text/prompt-format/InstructionPrompt.js";
 /**
  * Formats a text prompt as an Anthropic prompt.
@@ -14,4 +14,4 @@ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
  *
  * @see https://docs.anthropic.com/claude/docs/constructing-a-prompt
  */
-export declare function chat(): TextGenerationPromptFormat<ChatPrompt, string>;
+export declare function chat(): TextGenerationPromptFormat<TextChatPrompt, string>;

package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts
@@ -69,7 +69,7 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<AnthropicTextGenerationModelSettings>
     /**
      * Returns this model with a chat prompt format.
      */
-    withChatPrompt(): PromptFormatTextStreamingModel<import("../../index.js").ChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
+    withChatPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
     withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, AnthropicTextGenerationModelSettings, this>;
     withSettings(additionalSettings: Partial<AnthropicTextGenerationModelSettings>): this;
 }

package/model-provider/cohere/CohereTextGenerationModel.d.ts
@@ -94,7 +94,7 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextGenerationModelSettings>
     withChatPrompt(options?: {
         user?: string;
         assistant?: string;
-    }): PromptFormatTextStreamingModel<import("../../index.js").ChatPrompt, string, CohereTextGenerationModelSettings, this>;
+    }): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt, string, CohereTextGenerationModelSettings, this>;
     withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
     withSettings(additionalSettings: Partial<CohereTextGenerationModelSettings>): this;
 }

package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.cjs
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.instruction = void 0;
+exports.chat = exports.instruction = void 0;
+const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-format/ChatPrompt.cjs");
 // default Vicuna 1 system message
 const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
     "The assistant gives helpful, detailed, and polite answers to the user's questions.";
@@ -33,10 +34,57 @@ function instruction() {
                 }
                 text += `${content}\n`;
             }
-            text += `ASSISTANT: `;
+            text += `\nASSISTANT: `;
             return { text, images };
         },
         stopSequences: [`\nUSER:`],
     };
 }
 exports.instruction = instruction;
+function chat() {
+    return {
+        format(prompt) {
+            (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
+            let text = "";
+            text += `${prompt.system ?? DEFAULT_SYSTEM_MESSAGE}\n\n`;
+            // construct text and image mapping:
+            let imageCounter = 1;
+            const images = {};
+            for (const { role, content } of prompt.messages) {
+                switch (role) {
+                    case "user": {
+                        text += `USER: `;
+                        for (const part of content) {
+                            switch (part.type) {
+                                case "text": {
+                                    text += part.text;
+                                    break;
+                                }
+                                case "image": {
+                                    text += `[img-${imageCounter}]`;
+                                    images[imageCounter.toString()] = part.base64Image;
+                                    imageCounter++;
+                                    break;
+                                }
+                            }
+                        }
+                        break;
+                    }
+                    case "assistant": {
+                        text += `ASSISTANT: ${content}`;
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
+                text += `\n\n`;
+            }
+            text += `ASSISTANT: `;
+            return { text, images };
+        },
+        stopSequences: [`\nUSER:`],
+    };
+}
+exports.chat = chat;

package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts
@@ -1,5 +1,6 @@
-import { MultiModalInstructionPrompt } from "../../model-function/generate-text/prompt-format/InstructionPrompt.js";
 import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+import { MultiModalChatPrompt } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
+import { MultiModalInstructionPrompt } from "../../model-function/generate-text/prompt-format/InstructionPrompt.js";
 import { LlamaCppTextGenerationPrompt } from "./LlamaCppTextGenerationModel.js";
 /**
  * BakLLaVA 1 uses a Vicuna 1 prompt. This mapping combines it with the LlamaCpp prompt structure.
@@ -7,3 +8,4 @@ import { LlamaCppTextGenerationPrompt } from "./LlamaCppTextGenerationModel.js";
  * @see https://github.com/SkunkworksAI/BakLLaVA
  */
 export declare function instruction(): TextGenerationPromptFormat<MultiModalInstructionPrompt, LlamaCppTextGenerationPrompt>;
+export declare function chat(): TextGenerationPromptFormat<MultiModalChatPrompt, LlamaCppTextGenerationPrompt>;

package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.js
@@ -1,3 +1,4 @@
+import { validateChatPrompt, } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
 // default Vicuna 1 system message
 const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
     "The assistant gives helpful, detailed, and polite answers to the user's questions.";
@@ -30,6 +31,52 @@ export function instruction() {
                 }
                 text += `${content}\n`;
             }
+            text += `\nASSISTANT: `;
+            return { text, images };
+        },
+        stopSequences: [`\nUSER:`],
+    };
+}
+export function chat() {
+    return {
+        format(prompt) {
+            validateChatPrompt(prompt);
+            let text = "";
+            text += `${prompt.system ?? DEFAULT_SYSTEM_MESSAGE}\n\n`;
+            // construct text and image mapping:
+            let imageCounter = 1;
+            const images = {};
+            for (const { role, content } of prompt.messages) {
+                switch (role) {
+                    case "user": {
+                        text += `USER: `;
+                        for (const part of content) {
+                            switch (part.type) {
+                                case "text": {
+                                    text += part.text;
+                                    break;
+                                }
+                                case "image": {
+                                    text += `[img-${imageCounter}]`;
+                                    images[imageCounter.toString()] = part.base64Image;
+                                    imageCounter++;
+                                    break;
+                                }
+                            }
+                        }
+                        break;
+                    }
+                    case "assistant": {
+                        text += `ASSISTANT: ${content}`;
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
+                text += `\n\n`;
+            }
             text += `ASSISTANT: `;
             return { text, images };
         },
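
To make the new chat() formatter concrete: it prepends the system message (or the default Vicuna message), renders each user turn as "USER: " with a numbered [img-N] placeholder per image part, renders assistant turns as "ASSISTANT: ...", separates turns with blank lines, and ends with a trailing "ASSISTANT: " to cue the model. A data-only sketch of one call, derived directly from the code above (the base64 payload is a stand-in):

    // Input: a MultiModalChatPrompt with a single user turn.
    const prompt = {
      system: "A chat between a curious user and an artificial intelligence assistant.",
      messages: [
        {
          role: "user" as const,
          content: [
            { type: "text" as const, text: "What is in this picture? " },
            { type: "image" as const, base64Image: "<base64>" },
          ],
        },
      ],
    };

    // chat().format(prompt) returns a LlamaCppTextGenerationPrompt shaped like:
    const formatted = {
      text:
        "A chat between a curious user and an artificial intelligence assistant.\n\n" +
        "USER: What is in this picture? [img-1]\n\n" +
        "ASSISTANT: ",
      images: { "1": "<base64>" },
    };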

package/model-provider/openai/OpenAICompletionModel.d.ts
@@ -183,7 +183,7 @@ export declare class OpenAICompletionModel extends AbstractModel<OpenAICompletionModelSettings>
     withChatPrompt(options?: {
         user?: string;
         assistant?: string;
-    }): PromptFormatTextStreamingModel<import("../../index.js").ChatPrompt, string, OpenAICompletionModelSettings, this>;
+    }): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt, string, OpenAICompletionModelSettings, this>;
     withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, OpenAICompletionModelSettings, this>;
     withSettings(additionalSettings: Partial<OpenAICompletionModelSettings>): this;
 }

package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts
@@ -31,7 +31,7 @@ OpenAIChatSettings> {
     /**
      * Returns this model with a chat prompt format.
      */
-    withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").ChatPrompt, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
+    withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").TextChatPrompt | import("../../../index.js").MultiModalChatPrompt, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
     withPromptFormat<TARGET_PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatPrompt>>(promptFormat: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallStructureGenerationModel<TARGET_PROMPT_FORMAT>;
     withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
     /**

package/model-provider/openai/chat/OpenAIChatModel.d.ts
@@ -155,7 +155,7 @@ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatSettings>
     /**
      * Returns this model with a chat prompt format.
      */
-    withChatPrompt(): PromptFormatTextStreamingModel<import("../../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
+    withChatPrompt(): PromptFormatTextStreamingModel<import("../../../index.js").TextChatPrompt | import("../../../index.js").MultiModalChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
     withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, OpenAIChatPrompt>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAIChatSettings, this>;
     withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
 }
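
On the OpenAI side the prompt-format plumbing is unchanged; only the accepted input type widens to TextChatPrompt | MultiModalChatPrompt. A hedged end-to-end sketch, assuming the OpenAIChatModel constructor and the streamText helper behave as in modelfusion's documentation of this period (neither appears in this diff, and the model name is illustrative):

    import { OpenAIChatModel, streamText } from "modelfusion";

    async function main() {
      // withChatPrompt() maps TextChatPrompt | MultiModalChatPrompt onto
      // OpenAIChatMessage[] (see the OpenAIChatPromptFormat hunk below).
      const model = new OpenAIChatModel({ model: "gpt-4" }).withChatPrompt();

      const textStream = await streamText(model, {
        system: "You are a helpful assistant.",
        messages: [{ role: "user", content: "Hello!" }],
      });

      for await (const chunk of textStream) {
        process.stdout.write(chunk);
      }
    }

    main();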

package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts
@@ -1,5 +1,5 @@
 import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
-import { ChatPrompt } from "../../../model-function/generate-text/prompt-format/ChatPrompt.js";
+import { MultiModalChatPrompt, TextChatPrompt } from "../../../model-function/generate-text/prompt-format/ChatPrompt.js";
 import { MultiModalInstructionPrompt, TextInstructionPrompt } from "../../../model-function/generate-text/prompt-format/InstructionPrompt.js";
 import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
@@ -17,4 +17,4 @@ export declare function instruction(): TextGenerationPromptFormat<MultiModalInstructionPrompt | TextInstructionPrompt, Array<OpenAIChatMessage>>;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export declare function chat(): TextGenerationPromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
+export declare function chat(): TextGenerationPromptFormat<MultiModalChatPrompt | TextChatPrompt, Array<OpenAIChatMessage>>;

package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts
@@ -39,7 +39,7 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<OpenAICompatibleChatSettings>
     /**
      * Returns this model with a chat prompt format.
      */
-    withChatPrompt(): PromptFormatTextStreamingModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
+    withChatPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt | import("../../index.js").MultiModalChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
     withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, OpenAIChatPrompt>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
     withSettings(additionalSettings: Partial<OpenAICompatibleChatSettings>): this;
 }