modelfusion 0.106.0 → 0.107.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/README.md +8 -49
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +9 -7
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +9 -7
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs +150 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.d.ts +62 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js +143 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.cjs +60 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.js +58 -0
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/index.cjs +2 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -0
- package/model-function/generate-text/prompt-template/index.js +1 -0
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +3 -3
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.cjs → LlamaCppCompletionModel.cjs} +8 -8
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.d.ts → LlamaCppCompletionModel.d.ts} +26 -26
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.js → LlamaCppCompletionModel.js} +6 -6
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.cjs → LlamaCppCompletionModel.test.cjs} +3 -3
- package/model-provider/llamacpp/LlamaCppCompletionModel.test.d.ts +1 -0
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.js → LlamaCppCompletionModel.test.js} +3 -3
- package/model-provider/llamacpp/LlamaCppFacade.cjs +2 -2
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -2
- package/model-provider/llamacpp/LlamaCppFacade.js +2 -2
- package/model-provider/llamacpp/index.cjs +1 -1
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/mistral/MistralChatModel.cjs +4 -4
- package/model-provider/mistral/MistralChatModel.d.ts +6 -6
- package/model-provider/mistral/MistralChatModel.js +1 -1
- package/model-provider/mistral/index.cjs +3 -3
- package/model-provider/mistral/index.d.ts +2 -2
- package/model-provider/mistral/index.js +2 -2
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +8 -8
- package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.d.ts +6 -6
- package/package.json +1 -1
- package/tool/generate-tool-call/index.cjs +1 -0
- package/tool/generate-tool-call/index.d.ts +1 -0
- package/tool/generate-tool-call/index.js +1 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.cjs +30 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.d.ts +5 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.js +27 -0
- /package/{model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts → model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.d.ts} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.cjs → MistralChatPromptTemplate.cjs} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.d.ts → MistralChatPromptTemplate.d.ts} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.js → MistralChatPromptTemplate.js} +0 -0
package/model-provider/llamacpp/index.cjs
CHANGED
@@ -33,5 +33,5 @@ var LlamaCppError_js_1 = require("./LlamaCppError.cjs");
 Object.defineProperty(exports, "LlamaCppError", { enumerable: true, get: function () { return LlamaCppError_js_1.LlamaCppError; } });
 exports.llamacpp = __importStar(require("./LlamaCppFacade.cjs"));
 __exportStar(require("./LlamaCppTextEmbeddingModel.cjs"), exports);
-__exportStar(require("./LlamaCppTextGenerationModel.cjs"), exports);
+__exportStar(require("./LlamaCppCompletionModel.cjs"), exports);
 __exportStar(require("./LlamaCppTokenizer.cjs"), exports);

package/model-provider/llamacpp/index.d.ts
CHANGED
@@ -3,5 +3,5 @@ export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError, LlamaCppErrorData } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
-export * from "./LlamaCppTextGenerationModel.js";
+export * from "./LlamaCppCompletionModel.js";
 export * from "./LlamaCppTokenizer.js";

package/model-provider/llamacpp/index.js
CHANGED
@@ -3,5 +3,5 @@ export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
-export * from "./LlamaCppTextGenerationModel.js";
+export * from "./LlamaCppCompletionModel.js";
 export * from "./LlamaCppTokenizer.js";
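Taken together with the renamed `LlamaCppTextGenerationModel.*` → `LlamaCppCompletionModel.*` files in the list above, these index changes alter the import path for consumers. A hedged before/after sketch, assuming the exported class name follows the file rename and is re-exported from the package root:

```ts
// before (0.106.0):
// import { LlamaCppTextGenerationModel } from "modelfusion";

// after (0.107.0) — class name assumed to follow the file rename shown above:
import { LlamaCppCompletionModel } from "modelfusion";

// settings shape is untouched by this diff; an empty settings object is
// assumed to remain valid
const model = new LlamaCppCompletionModel({});
```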
package/model-provider/mistral/MistralChatModel.cjs
CHANGED
@@ -10,8 +10,8 @@ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const createEventSourceResponseHandler_js_1 = require("../../util/streaming/createEventSourceResponseHandler.cjs");
 const MistralApiConfiguration_js_1 = require("./MistralApiConfiguration.cjs");
+const MistralChatPromptTemplate_js_1 = require("./MistralChatPromptTemplate.cjs");
 const MistralError_js_1 = require("./MistralError.cjs");
-const MistralPromptTemplate_js_1 = require("./MistralPromptTemplate.cjs");
 class MistralChatModel extends AbstractModel_js_1.AbstractModel {
     constructor(settings) {
         super({ settings });
@@ -118,19 +118,19 @@ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
      * Returns this model with a text prompt template.
      */
     withTextPrompt() {
-        return this.withPromptTemplate((0, MistralPromptTemplate_js_1.text)());
+        return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.text)());
     }
     /**
      * Returns this model with an instruction prompt template.
      */
     withInstructionPrompt() {
-        return this.withPromptTemplate((0, MistralPromptTemplate_js_1.instruction)());
+        return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.instruction)());
     }
     /**
      * Returns this model with a chat prompt template.
      */
     withChatPrompt() {
-        return this.withPromptTemplate((0, MistralPromptTemplate_js_1.chat)());
+        return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.chat)());
     }
     withPromptTemplate(promptTemplate) {
         return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
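The three `with*Prompt` helpers above only swap their template source from the removed `MistralPromptTemplate` module to the renamed `MistralChatPromptTemplate`; the caller-facing API is unchanged. A minimal usage sketch, assuming the `mistral.ChatTextGenerator` facade and the positional `generateText` call of this modelfusion era (the model id is an assumption, not part of this diff):

```ts
import { generateText, mistral } from "modelfusion";

const model = mistral
  .ChatTextGenerator({ model: "mistral-tiny" }) // assumed model id
  .withTextPrompt(); // now resolved via MistralChatPromptTemplate.text()

const answer = await generateText(model, "Why is the sky blue?");
```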
package/model-provider/mistral/MistralChatModel.d.ts
CHANGED
@@ -64,8 +64,8 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSettings>
         object: string;
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
@@ -154,19 +154,19 @@ declare const mistralChatResponseSchema: z.ZodObject<{
         total_tokens: z.ZodNumber;
     }, "strip", z.ZodTypeAny, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }>;
 }, "strip", z.ZodTypeAny, {
     object: string;
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
@@ -183,8 +183,8 @@ declare const mistralChatResponseSchema: z.ZodObject<{
     object: string;
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
@@ -228,8 +228,8 @@ export declare const MistralChatResponseFormat: {
         object: string;
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
package/model-provider/mistral/MistralChatModel.js
CHANGED
@@ -7,8 +7,8 @@ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
 import { MistralApiConfiguration } from "./MistralApiConfiguration.js";
+import { chat, instruction, text } from "./MistralChatPromptTemplate.js";
 import { failedMistralCallResponseHandler } from "./MistralError.js";
-import { chat, instruction, text } from "./MistralPromptTemplate.js";
 export class MistralChatModel extends AbstractModel {
     constructor(settings) {
         super({ settings });
package/model-provider/mistral/index.cjs
CHANGED
@@ -26,9 +26,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.mistral = exports.MistralPrompt = void 0;
+exports.mistral = exports.MistralChatPrompt = void 0;
 __exportStar(require("./MistralApiConfiguration.cjs"), exports);
+__exportStar(require("./MistralChatModel.cjs"), exports);
+exports.MistralChatPrompt = __importStar(require("./MistralChatPromptTemplate.cjs"));
 exports.mistral = __importStar(require("./MistralFacade.cjs"));
-exports.MistralPrompt = __importStar(require("./MistralPromptTemplate.cjs"));
 __exportStar(require("./MistralTextEmbeddingModel.cjs"), exports);
-__exportStar(require("./MistralChatModel.cjs"), exports);
package/model-provider/mistral/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
 export * from "./MistralApiConfiguration.js";
+export * from "./MistralChatModel.js";
+export * as MistralChatPrompt from "./MistralChatPromptTemplate.js";
 export { MistralErrorData } from "./MistralError.js";
 export * as mistral from "./MistralFacade.js";
-export * as MistralPrompt from "./MistralPromptTemplate.js";
 export * from "./MistralTextEmbeddingModel.js";
-export * from "./MistralChatModel.js";
package/model-provider/mistral/index.js
CHANGED
@@ -1,5 +1,5 @@
 export * from "./MistralApiConfiguration.js";
+export * from "./MistralChatModel.js";
+export * as MistralChatPrompt from "./MistralChatPromptTemplate.js";
 export * as mistral from "./MistralFacade.js";
-export * as MistralPrompt from "./MistralPromptTemplate.js";
 export * from "./MistralTextEmbeddingModel.js";
-export * from "./MistralChatModel.js";
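For consumers, the visible change in these index files is the renamed prompt-template namespace (`MistralPrompt` → `MistralChatPrompt`); `MistralChatModel` is still exported, just from an earlier position. A sketch of the rename's effect, assuming the package root re-exports this provider index:

```ts
// before (0.106.0):
// import { MistralPrompt } from "modelfusion";
// const template = MistralPrompt.instruction();

// after (0.107.0):
import { MistralChatPrompt } from "modelfusion";

const template = MistralChatPrompt.instruction();
```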
package/model-provider/openai/AbstractOpenAIChatModel.d.ts
CHANGED
@@ -97,8 +97,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractOpenAIChatSettings>
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
@@ -172,8 +172,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractOpenAIChatSettings>
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
@@ -216,8 +216,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractOpenAIChatSettings>
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
@@ -392,19 +392,19 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         total_tokens: z.ZodNumber;
     }, "strip", z.ZodTypeAny, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }>;
 }, "strip", z.ZodTypeAny, {
     object: "chat.completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
@@ -435,8 +435,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     object: "chat.completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
@@ -683,8 +683,8 @@ export declare const OpenAIChatResponseFormat: {
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
package/model-provider/openai/OpenAICompletionModel.d.ts
CHANGED
@@ -148,8 +148,8 @@ export declare class OpenAICompletionModel extends AbstractModel<OpenAICompletionModelSettings>
         object: "text_completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
@@ -228,19 +228,19 @@ declare const OpenAICompletionResponseSchema: z.ZodObject<{
         total_tokens: z.ZodNumber;
     }, "strip", z.ZodTypeAny, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }>;
 }, "strip", z.ZodTypeAny, {
     object: "text_completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
@@ -256,8 +256,8 @@ declare const OpenAICompletionResponseSchema: z.ZodObject<{
     object: "text_completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
@@ -285,8 +285,8 @@ export declare const OpenAITextResponseFormat: {
         object: "text_completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;
package/package.json
CHANGED
-  "version": "0.106.0",
+  "version": "0.107.0",
package/tool/generate-tool-call/index.cjs
CHANGED
@@ -19,3 +19,4 @@ __exportStar(require("./ToolCallGenerationEvent.cjs"), exports);
 __exportStar(require("./ToolCallGenerationModel.cjs"), exports);
 __exportStar(require("./ToolCallParseError.cjs"), exports);
 __exportStar(require("./generateToolCall.cjs"), exports);
+__exportStar(require("./jsonToolCallPrompt.cjs"), exports);
package/tool/generate-tool-call/jsonToolCallPrompt.cjs
ADDED
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.jsonToolCallPrompt = void 0;
+const nanoid_1 = require("nanoid");
+const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
+exports.jsonToolCallPrompt = {
+    text() {
+        return {
+            createPrompt(instruction, tool) {
+                return {
+                    system: [
+                        `You are calling a function "${tool.name}".`,
+                        tool.description != null
+                            ? ` Function description: ${tool.description}`
+                            : null,
+                        ` Function parameters JSON schema: ${JSON.stringify(tool.parameters.getJsonSchema())}`,
+                        ``,
+                        `You MUST answer with a JSON object matches the above schema for the arguments.`,
+                    ]
+                        .filter(Boolean)
+                        .join("\n"),
+                    instruction,
+                };
+            },
+            extractToolCall(response) {
+                return { id: (0, nanoid_1.nanoid)(), args: (0, parseJSON_js_1.parseJSON)({ text: response }) };
+            },
+        };
+    },
+};
package/tool/generate-tool-call/jsonToolCallPrompt.d.ts
ADDED
@@ -0,0 +1,5 @@
+import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+import { ToolCallPromptTemplate } from "./TextGenerationToolCallModel.js";
+export declare const jsonToolCallPrompt: {
+    text(): ToolCallPromptTemplate<string, InstructionPrompt>;
+};
package/tool/generate-tool-call/jsonToolCallPrompt.js
ADDED
@@ -0,0 +1,27 @@
+import { nanoid } from "nanoid";
+import { parseJSON } from "../../core/schema/parseJSON.js";
+export const jsonToolCallPrompt = {
+    text() {
+        return {
+            createPrompt(instruction, tool) {
+                return {
+                    system: [
+                        `You are calling a function "${tool.name}".`,
+                        tool.description != null
+                            ? ` Function description: ${tool.description}`
+                            : null,
+                        ` Function parameters JSON schema: ${JSON.stringify(tool.parameters.getJsonSchema())}`,
+                        ``,
+                        `You MUST answer with a JSON object matches the above schema for the arguments.`,
+                    ]
+                        .filter(Boolean)
+                        .join("\n"),
+                    instruction,
+                };
+            },
+            extractToolCall(response) {
+                return { id: nanoid(), args: parseJSON({ text: response }) };
+            },
+        };
+    },
+};
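The new `jsonToolCallPrompt.text()` template adapts a plain instruction-following text model for tool calling: `createPrompt` injects the tool's JSON schema into a system message, and `extractToolCall` parses the model's JSON answer back into arguments. A sketch exercising it directly, using only the shapes visible above; the calculator object is a hypothetical stand-in providing just the fields the template reads, and the root re-export is assumed:

```ts
import { jsonToolCallPrompt } from "modelfusion"; // assumes a root re-export

// Hypothetical stand-in for a Tool; the template only reads name,
// description, and parameters.getJsonSchema().
const calculator = {
  name: "calculator",
  description: "Add two numbers.",
  parameters: {
    getJsonSchema: () => ({
      type: "object",
      properties: { a: { type: "number" }, b: { type: "number" } },
      required: ["a", "b"],
    }),
  },
};

const template = jsonToolCallPrompt.text();

// Builds { system, instruction }; the system text embeds the JSON schema:
const prompt = template.createPrompt("Add 3 and 4.", calculator as any);

// Parses the model's raw JSON completion into { id, args }:
const call = template.extractToolCall('{ "a": 3, "b": 4 }');
```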
/package/{model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts → model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.d.ts}
RENAMED
File without changes

/package/model-provider/mistral/{MistralPromptTemplate.cjs → MistralChatPromptTemplate.cjs}
RENAMED
File without changes

/package/model-provider/mistral/{MistralPromptTemplate.d.ts → MistralChatPromptTemplate.d.ts}
RENAMED
File without changes

/package/model-provider/mistral/{MistralPromptTemplate.js → MistralChatPromptTemplate.js}
RENAMED
File without changes