modelfusion 0.68.1 → 0.70.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -9
- package/core/schema/JSONParseError.cjs +1 -1
- package/core/schema/JSONParseError.d.ts +1 -1
- package/core/schema/JSONParseError.js +1 -1
- package/model-function/ModelCallEvent.d.ts +3 -2
- package/model-function/generate-structure/StructureValidationError.cjs +11 -0
- package/model-function/generate-structure/StructureValidationError.d.ts +9 -0
- package/model-function/generate-structure/StructureValidationError.js +11 -0
- package/model-function/generate-text/index.cjs +1 -8
- package/model-function/generate-text/index.d.ts +1 -8
- package/model-function/generate-text/index.js +1 -8
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.cjs +31 -3
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.d.ts +29 -1
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.js +29 -1
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.cjs +79 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +31 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.js +74 -0
- package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +28 -23
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.cjs +17 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.d.ts +8 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.js +13 -0
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.cjs +41 -27
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +20 -2
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.js +38 -24
- package/model-function/generate-text/prompt-format/TextPromptFormat.cjs +27 -30
- package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +7 -5
- package/model-function/generate-text/prompt-format/TextPromptFormat.js +24 -27
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.cjs +21 -29
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.js +19 -27
- package/model-function/generate-text/prompt-format/index.cjs +39 -0
- package/model-function/generate-text/prompt-format/index.d.ts +10 -0
- package/model-function/generate-text/prompt-format/index.js +10 -0
- package/model-function/generate-text/prompt-format/trimChatPrompt.cjs +17 -22
- package/model-function/generate-text/prompt-format/trimChatPrompt.js +17 -22
- package/model-function/generate-text/prompt-format/validateChatPrompt.cjs +12 -24
- package/model-function/generate-text/prompt-format/validateChatPrompt.d.ts +0 -3
- package/model-function/generate-text/prompt-format/validateChatPrompt.js +10 -21
- package/model-function/generate-tool-call/ToolCallDefinition.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallDefinition.d.ts +7 -0
- package/model-function/generate-tool-call/ToolCallDefinition.js +1 -0
- package/model-function/generate-tool-call/ToolCallGenerationError.cjs +35 -0
- package/model-function/generate-tool-call/ToolCallGenerationError.d.ts +15 -0
- package/model-function/generate-tool-call/ToolCallGenerationError.js +31 -0
- package/model-function/generate-tool-call/ToolCallGenerationEvent.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallGenerationEvent.d.ts +23 -0
- package/model-function/generate-tool-call/ToolCallGenerationEvent.js +1 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.d.ts +19 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.js +1 -0
- package/model-function/generate-tool-call/ToolCallParametersValidationError.cjs +44 -0
- package/model-function/generate-tool-call/ToolCallParametersValidationError.d.ts +18 -0
- package/model-function/generate-tool-call/ToolCallParametersValidationError.js +40 -0
- package/model-function/generate-tool-call/generateToolCall.cjs +58 -0
- package/model-function/generate-tool-call/generateToolCall.d.ts +20 -0
- package/model-function/generate-tool-call/generateToolCall.js +54 -0
- package/model-function/generate-tool-call/index.cjs +21 -0
- package/model-function/generate-tool-call/index.d.ts +5 -0
- package/model-function/generate-tool-call/index.js +5 -0
- package/model-function/index.cjs +1 -0
- package/model-function/index.d.ts +1 -0
- package/model-function/index.js +1 -0
- package/model-provider/anthropic/AnthropicPromptFormat.cjs +22 -26
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +4 -2
- package/model-provider/anthropic/AnthropicPromptFormat.js +19 -23
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +2 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +3 -3
- package/model-provider/anthropic/index.cjs +14 -2
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.js +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.cjs → LlamaCppBakLLaVA1Format.cjs} +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.d.ts → LlamaCppBakLLaVA1Format.d.ts} +2 -2
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.js → LlamaCppBakLLaVA1Format.js} +2 -2
- package/model-provider/llamacpp/index.cjs +14 -2
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/openai/OpenAICompletionModel.cjs +4 -4
- package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.js +5 -5
- package/model-provider/openai/chat/OpenAIChatModel.cjs +50 -4
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +192 -20
- package/model-provider/openai/chat/OpenAIChatModel.js +51 -5
- package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +22 -34
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatPromptFormat.js +19 -31
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +10 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +10 -0
- package/model-provider/openai/index.cjs +14 -2
- package/model-provider/openai/index.d.ts +1 -1
- package/model-provider/openai/index.js +1 -1
- package/package.json +3 -3
package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts
CHANGED
@@ -5,8 +5,8 @@ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-export declare function
+export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export declare function
+export declare function chat(): TextGenerationPromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
package/model-provider/openai/chat/OpenAIChatPromptFormat.js
CHANGED
@@ -3,7 +3,7 @@ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-export function
+export function instruction() {
     return {
         format: (instruction) => {
             const messages = [];
@@ -24,41 +24,29 @@ export function mapInstructionPromptToOpenAIChatFormat() {
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export function
+export function chat() {
     return {
         format: (chatPrompt) => {
             validateChatPrompt(chatPrompt);
             const messages = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    role:
-
-                });
-                continue;
-            }
-            // ai message:
-            if ("ai" in message) {
-                messages.push({
-                    role: "assistant",
-                    content: message.ai,
-                });
-                continue;
+            if (chatPrompt.system != null) {
+                messages.push(OpenAIChatMessage.system(chatPrompt.system));
+            }
+            for (const { role, content } of chatPrompt.messages) {
+                switch (role) {
+                    case "user": {
+                        messages.push(OpenAIChatMessage.user(content));
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push(OpenAIChatMessage.assistant(content));
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
                 }
-                // unsupported message:
-                throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
             }
             return messages;
         },
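The `chat()` format above maps the restructured `ChatPrompt` (an optional `system` string plus a `messages` array with `"user"` and `"assistant"` roles) onto `OpenAIChatMessage` objects. A minimal usage sketch, assuming the `OpenAIChatPromptFormat` namespace is reachable from the package root as the index changes further down suggest; the prompt text is invented:

```ts
import { OpenAIChatPromptFormat } from "modelfusion";

// The structured chat prompt used by this release: an optional system
// message plus a list of user/assistant messages.
const chatPrompt = {
  system: "You are a helpful assistant.",
  messages: [
    { role: "user" as const, content: "Hello!" },
    { role: "assistant" as const, content: "Hi! How can I help?" },
    { role: "user" as const, content: "Write a haiku about diffs." },
  ],
};

// format() validates the prompt and returns an Array<OpenAIChatMessage>,
// starting with the system message when one is present.
const messages = OpenAIChatPromptFormat.chat().format(chatPrompt);
```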
package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs
CHANGED
@@ -18,6 +18,16 @@ const chatResponseStreamEventSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
                 arguments: zod_1.z.string().optional(),
             })
                 .optional(),
+            tool_calls: zod_1.z
+                .array(zod_1.z.object({
+                id: zod_1.z.string(),
+                type: zod_1.z.literal("function"),
+                function: zod_1.z.object({
+                    name: zod_1.z.string(),
+                    arguments: zod_1.z.string(),
+                }),
+            }))
+                .optional(),
         }),
         finish_reason: zod_1.z
             .enum([
package/model-provider/openai/chat/OpenAIChatStreamIterable.js
CHANGED
@@ -15,6 +15,16 @@ const chatResponseStreamEventSchema = new ZodSchema(z.object({
                 arguments: z.string().optional(),
             })
                 .optional(),
+            tool_calls: z
+                .array(z.object({
+                id: z.string(),
+                type: z.literal("function"),
+                function: z.object({
+                    name: z.string(),
+                    arguments: z.string(),
+                }),
+            }))
+                .optional(),
         }),
         finish_reason: z
             .enum([
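Both stream-iterable variants extend the streamed chat delta schema with an optional `tool_calls` array. Restated standalone as a sketch (the field names are exactly those added above; the example values are invented), this is the shape the stream parser now accepts:

```ts
import { z } from "zod";

// The added tool_calls fragment of the streamed chat delta schema.
const toolCallsDelta = z
  .array(
    z.object({
      id: z.string(),
      type: z.literal("function"),
      function: z.object({
        name: z.string(),
        arguments: z.string(), // arguments are streamed as a string
      }),
    })
  )
  .optional();

// A delta fragment like this now validates instead of being rejected:
toolCallsDelta.parse([
  {
    id: "call_abc123", // invented id
    type: "function",
    function: { name: "getWeather", arguments: '{"city":"Berlin"}' },
  },
]);
```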
package/model-provider/openai/index.cjs
CHANGED
@@ -10,11 +10,23 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.OpenAIError = void 0;
+exports.OpenAIChatPromptFormat = exports.OpenAIError = void 0;
 __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAICompletionModel.cjs"), exports);
@@ -28,5 +40,5 @@ __exportStar(require("./OpenAITranscriptionModel.cjs"), exports);
 __exportStar(require("./TikTokenTokenizer.cjs"), exports);
 __exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
 __exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
-
+exports.OpenAIChatPromptFormat = __importStar(require("./chat/OpenAIChatPromptFormat.cjs"));
 __exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
package/model-provider/openai/index.d.ts
CHANGED
@@ -10,6 +10,6 @@ export * from "./OpenAITranscriptionModel.js";
 export * from "./TikTokenTokenizer.js";
 export * from "./chat/OpenAIChatMessage.js";
 export * from "./chat/OpenAIChatModel.js";
-export * from "./chat/OpenAIChatPromptFormat.js";
+export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
 export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
 export * from "./chat/countOpenAIChatMessageTokens.js";
package/model-provider/openai/index.js
CHANGED
@@ -10,5 +10,5 @@ export * from "./OpenAITranscriptionModel.js";
 export * from "./TikTokenTokenizer.js";
 export * from "./chat/OpenAIChatMessage.js";
 export * from "./chat/OpenAIChatModel.js";
-export * from "./chat/OpenAIChatPromptFormat.js";
+export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
 export * from "./chat/countOpenAIChatMessageTokens.js";
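The openai index files switch the prompt-format helpers from a wildcard re-export to a namespace re-export: in 0.68.x the helpers (e.g. `mapInstructionPromptToOpenAIChatFormat`) were top-level exports, while from 0.70.0 they are reached through the `OpenAIChatPromptFormat` namespace. A short sketch, assuming the openai provider module is itself re-exported from the package root:

```ts
import { OpenAIChatPromptFormat } from "modelfusion";

// 0.70.0 style: the factory functions live on the namespace.
const instructionFormat = OpenAIChatPromptFormat.instruction();
const chatFormat = OpenAIChatPromptFormat.chat();
```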
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "modelfusion",
   "description": "Build multimodal applications, chatbots, and agents with JavaScript and TypeScript.",
-  "version": "0.68.1",
+  "version": "0.70.0",
   "author": "Lars Grammel",
   "license": "MIT",
   "keywords": [
@@ -81,8 +81,8 @@
     "eslint-config-prettier": "9.0.0",
     "fastify": "^4.0.0",
     "husky": "^8.0.3",
-    "lint-staged": "15.0
-    "prettier": "3.0
+    "lint-staged": "15.1.0",
+    "prettier": "3.1.0",
     "rimraf": "5.0.5",
     "typescript": "5.2.2",
     "vitest": "^0.34.5"