modelfusion 0.117.0 → 0.119.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +10 -9
- package/core/getFunctionCallLogger.cjs +6 -6
- package/core/getFunctionCallLogger.js +6 -6
- package/model-function/ModelCallEvent.d.ts +1 -1
- package/model-function/embed/EmbeddingEvent.d.ts +1 -1
- package/model-function/embed/EmbeddingModel.d.ts +1 -1
- package/model-function/embed/embed.cjs +5 -5
- package/model-function/embed/embed.d.ts +2 -2
- package/model-function/embed/embed.js +5 -5
- package/model-function/executeStandardCall.cjs +3 -3
- package/model-function/executeStandardCall.d.ts +2 -2
- package/model-function/executeStandardCall.js +3 -3
- package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
- package/model-function/generate-image/ImageGenerationModel.d.ts +1 -1
- package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +1 -1
- package/model-function/generate-image/generateImage.cjs +2 -2
- package/model-function/generate-image/generateImage.d.ts +1 -1
- package/model-function/generate-image/generateImage.js +2 -2
- package/model-function/generate-speech/SpeechGenerationEvent.d.ts +1 -1
- package/model-function/generate-speech/generateSpeech.cjs +2 -2
- package/model-function/generate-speech/generateSpeech.d.ts +1 -1
- package/model-function/generate-speech/generateSpeech.js +2 -2
- package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +10 -1
- package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +1 -0
- package/model-function/generate-structure/StructureFromTextGenerationModel.js +10 -1
- package/model-function/generate-structure/StructureFromTextPromptTemplate.d.ts +12 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +1 -22
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +0 -5
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +1 -22
- package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
- package/model-function/generate-structure/generateStructure.cjs +2 -2
- package/model-function/generate-structure/generateStructure.d.ts +1 -1
- package/model-function/generate-structure/generateStructure.js +2 -2
- package/model-function/generate-structure/jsonStructurePrompt.cjs +4 -12
- package/model-function/generate-structure/jsonStructurePrompt.js +4 -12
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +2 -2
- package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +6 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +5 -2
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +6 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +6 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +6 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
- package/model-function/generate-text/TextGenerationModel.d.ts +7 -4
- package/model-function/generate-text/generateText.cjs +3 -3
- package/model-function/generate-text/generateText.d.ts +1 -1
- package/model-function/generate-text/generateText.js +3 -3
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +8 -1
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +5 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +6 -0
- package/model-function/generate-text/prompt-template/PromptTemplateProvider.cjs +2 -0
- package/model-function/generate-text/prompt-template/PromptTemplateProvider.d.ts +8 -0
- package/model-function/generate-text/prompt-template/PromptTemplateProvider.js +1 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +34 -1
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +9 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +31 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +28 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +29 -1
- package/model-function/generate-text/prompt-template/index.cjs +1 -0
- package/model-function/generate-text/prompt-template/index.d.ts +1 -0
- package/model-function/generate-text/prompt-template/index.js +1 -0
- package/model-function/generate-transcription/TranscriptionEvent.d.ts +1 -1
- package/model-function/generate-transcription/TranscriptionModel.d.ts +1 -1
- package/model-function/generate-transcription/generateTranscription.cjs +1 -1
- package/model-function/generate-transcription/generateTranscription.d.ts +1 -1
- package/model-function/generate-transcription/generateTranscription.js +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +3 -3
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.js +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +5 -4
- package/model-provider/cohere/CohereTextGenerationModel.js +6 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +3 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -1
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +3 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +6 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +5 -4
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +6 -3
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +15 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -0
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +13 -0
- package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +40 -33
- package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +20 -9
- package/model-provider/llamacpp/LlamaCppCompletionModel.js +40 -33
- package/model-provider/llamacpp/LlamaCppFacade.cjs +4 -3
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -1
- package/model-provider/llamacpp/LlamaCppFacade.js +2 -1
- package/model-provider/llamacpp/LlamaCppGrammars.cjs +3 -1
- package/model-provider/llamacpp/LlamaCppGrammars.d.ts +1 -0
- package/model-provider/llamacpp/LlamaCppGrammars.js +1 -0
- package/model-provider/llamacpp/LlamaCppPrompt.cjs +59 -0
- package/model-provider/llamacpp/LlamaCppPrompt.d.ts +14 -0
- package/model-provider/llamacpp/LlamaCppPrompt.js +31 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +3 -3
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +1 -1
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +3 -3
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.cjs +113 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.d.ts +7 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.js +109 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.cjs +150 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.d.ts +1 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.js +148 -0
- package/model-provider/llamacpp/index.cjs +2 -3
- package/model-provider/llamacpp/index.d.ts +1 -2
- package/model-provider/llamacpp/index.js +1 -2
- package/model-provider/mistral/MistralChatModel.cjs +6 -3
- package/model-provider/mistral/MistralChatModel.d.ts +5 -4
- package/model-provider/mistral/MistralChatModel.js +6 -3
- package/model-provider/mistral/MistralTextEmbeddingModel.cjs +3 -3
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +1 -1
- package/model-provider/mistral/MistralTextEmbeddingModel.js +3 -3
- package/model-provider/ollama/OllamaChatModel.cjs +3 -3
- package/model-provider/ollama/OllamaChatModel.d.ts +2 -2
- package/model-provider/ollama/OllamaChatModel.js +3 -3
- package/model-provider/ollama/OllamaCompletionModel.cjs +6 -3
- package/model-provider/ollama/OllamaCompletionModel.d.ts +15 -14
- package/model-provider/ollama/OllamaCompletionModel.js +6 -3
- package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +3 -3
- package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +1 -1
- package/model-provider/ollama/OllamaTextEmbeddingModel.js +3 -3
- package/model-provider/openai/AbstractOpenAIChatModel.cjs +12 -12
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +6 -6
- package/model-provider/openai/AbstractOpenAIChatModel.js +12 -12
- package/model-provider/openai/AbstractOpenAICompletionModel.cjs +9 -6
- package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +3 -2
- package/model-provider/openai/AbstractOpenAICompletionModel.js +9 -6
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +3 -3
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
- package/model-provider/openai/OpenAIImageGenerationModel.js +3 -3
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -3
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -1
- package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -3
- package/model-provider/openai/OpenAITranscriptionModel.cjs +3 -3
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +1 -1
- package/model-provider/openai/OpenAITranscriptionModel.js +3 -3
- package/model-provider/stability/StabilityImageGenerationModel.cjs +3 -3
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
- package/model-provider/stability/StabilityImageGenerationModel.js +3 -3
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +3 -3
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +1 -1
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +3 -3
- package/package.json +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +2 -2
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +2 -2
- package/tool/generate-tool-call/ToolCallGenerationEvent.d.ts +1 -1
- package/tool/generate-tool-call/ToolCallGenerationModel.d.ts +1 -1
- package/tool/generate-tool-call/generateToolCall.cjs +2 -2
- package/tool/generate-tool-call/generateToolCall.js +2 -2
- package/tool/generate-tool-calls/TextGenerationToolCallsModel.cjs +2 -2
- package/tool/generate-tool-calls/TextGenerationToolCallsModel.d.ts +1 -1
- package/tool/generate-tool-calls/TextGenerationToolCallsModel.js +2 -2
- package/tool/generate-tool-calls/ToolCallsGenerationEvent.d.ts +1 -1
- package/tool/generate-tool-calls/ToolCallsGenerationModel.d.ts +1 -1
- package/tool/generate-tool-calls/generateToolCalls.cjs +2 -2
- package/tool/generate-tool-calls/generateToolCalls.d.ts +1 -1
- package/tool/generate-tool-calls/generateToolCalls.js +2 -2
package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.cjs
@@ -0,0 +1,150 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const convertJsonSchemaToGBNF_js_1 = require("./convertJsonSchemaToGBNF.cjs");
+describe("primitives", () => {
+    it("should convert string", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "string",
+        })).toMatchSnapshot();
+    });
+    it("should convert number", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "number",
+        })).toMatchSnapshot();
+    });
+    it("should convert integer", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "integer",
+        })).toMatchSnapshot();
+    });
+    it("should convert boolean", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "boolean",
+        })).toMatchSnapshot();
+    });
+    it("should convert null", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "null",
+        })).toMatchSnapshot();
+    });
+});
+describe("array", () => {
+    it("should convert array of string", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "array",
+            items: {
+                type: "string",
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert array of array of string", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "array",
+            items: {
+                type: "array",
+                items: {
+                    type: "string",
+                },
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert array of object", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "array",
+            items: {
+                type: "object",
+                properties: {
+                    name: {
+                        type: "string",
+                    },
+                    age: {
+                        type: "number",
+                    },
+                },
+            },
+        })).toMatchSnapshot();
+    });
+});
+describe("object", () => {
+    it("should convert object", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert object with required properties", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            required: ["name"],
+        })).toMatchSnapshot();
+    });
+    it("should convert object with additional properties", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            additionalProperties: true,
+        })).toMatchSnapshot();
+    });
+    it("should convert object with additional properties of string", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            additionalProperties: {
+                type: "string",
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert object with additional properties of object", () => {
+        expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            additionalProperties: {
+                type: "object",
+                properties: {
+                    name: {
+                        type: "string",
+                    },
+                    age: {
+                        type: "number",
+                    },
+                },
+            },
+        })).toMatchSnapshot();
+    });
+});
package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.d.ts
@@ -0,0 +1 @@
+export {};
package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.js
@@ -0,0 +1,148 @@
+import { convertJsonSchemaToGBNF } from "./convertJsonSchemaToGBNF.js";
+describe("primitives", () => {
+    it("should convert string", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "string",
+        })).toMatchSnapshot();
+    });
+    it("should convert number", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "number",
+        })).toMatchSnapshot();
+    });
+    it("should convert integer", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "integer",
+        })).toMatchSnapshot();
+    });
+    it("should convert boolean", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "boolean",
+        })).toMatchSnapshot();
+    });
+    it("should convert null", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "null",
+        })).toMatchSnapshot();
+    });
+});
+describe("array", () => {
+    it("should convert array of string", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "array",
+            items: {
+                type: "string",
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert array of array of string", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "array",
+            items: {
+                type: "array",
+                items: {
+                    type: "string",
+                },
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert array of object", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "array",
+            items: {
+                type: "object",
+                properties: {
+                    name: {
+                        type: "string",
+                    },
+                    age: {
+                        type: "number",
+                    },
+                },
+            },
+        })).toMatchSnapshot();
+    });
+});
+describe("object", () => {
+    it("should convert object", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert object with required properties", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            required: ["name"],
+        })).toMatchSnapshot();
+    });
+    it("should convert object with additional properties", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            additionalProperties: true,
+        })).toMatchSnapshot();
+    });
+    it("should convert object with additional properties of string", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            additionalProperties: {
+                type: "string",
+            },
+        })).toMatchSnapshot();
+    });
+    it("should convert object with additional properties of object", () => {
+        expect(convertJsonSchemaToGBNF({
+            type: "object",
+            properties: {
+                name: {
+                    type: "string",
+                },
+                age: {
+                    type: "number",
+                },
+            },
+            additionalProperties: {
+                type: "object",
+                properties: {
+                    name: {
+                        type: "string",
+                    },
+                    age: {
+                        type: "number",
+                    },
+                },
+            },
+        })).toMatchSnapshot();
+    });
+});
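Taken together, the two snapshot suites above define the surface of the new `convertJsonSchemaToGBNF` helper: a plain JSON Schema object in, a llama.cpp GBNF grammar out. A minimal usage sketch, assuming the deep import path from the package layout above and a string return type (the function is not re-exported from the llamacpp index below):

```ts
// Sketch based on the tests above. The import path is an assumption from
// the package layout; adjust to however your setup resolves modelfusion's
// internal modules.
import { convertJsonSchemaToGBNF } from "modelfusion/model-provider/llamacpp/convertJsonSchemaToGBNF.js";

// Assumed to return the grammar as a string, which a llama.cpp server can
// use to constrain generation to JSON matching the schema.
const grammar: string = convertJsonSchemaToGBNF({
  type: "object",
  properties: {
    name: { type: "string" },
    age: { type: "number" },
  },
  required: ["name"],
});

console.log(grammar);
```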
package/model-provider/llamacpp/index.cjs
@@ -26,10 +26,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.llamacpp = exports.LlamaCppGrammars = void 0;
+exports.llamacpp = void 0;
 __exportStar(require("./LlamaCppApiConfiguration.cjs"), exports);
-exports.LlamaCppGrammars = __importStar(require("./LlamaCppGrammars.cjs"));
+__exportStar(require("./LlamaCppCompletionModel.cjs"), exports);
 exports.llamacpp = __importStar(require("./LlamaCppFacade.cjs"));
 __exportStar(require("./LlamaCppTextEmbeddingModel.cjs"), exports);
-__exportStar(require("./LlamaCppCompletionModel.cjs"), exports);
 __exportStar(require("./LlamaCppTokenizer.cjs"), exports);
package/model-provider/llamacpp/index.d.ts
@@ -1,7 +1,6 @@
 export * from "./LlamaCppApiConfiguration.js";
-export * as LlamaCppGrammars from "./LlamaCppGrammars.js";
+export * from "./LlamaCppCompletionModel.js";
 export { LlamaCppErrorData } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
-export * from "./LlamaCppCompletionModel.js";
 export * from "./LlamaCppTokenizer.js";
package/model-provider/llamacpp/index.js
@@ -1,6 +1,5 @@
 export * from "./LlamaCppApiConfiguration.js";
-export * as LlamaCppGrammars from "./LlamaCppGrammars.js";
+export * from "./LlamaCppCompletionModel.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
-export * from "./LlamaCppCompletionModel.js";
 export * from "./LlamaCppTokenizer.js";
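This index reshuffle accompanies the new `LlamaCppPrompt` and `PromptTemplateProvider` modules in the file list, which suggest that prompt templates are now selectable on the completion model through the `llamacpp` facade. A hedged sketch of that usage; `llamacpp.prompt.ChatML` and the `promptTemplate` setting are assumed names inferred from the file list, not confirmed by the visible hunks:

```ts
import { llamacpp, streamText } from "modelfusion";

// Sketch under assumptions: pick a prompt template for the llama.cpp
// completion model via the facade, then use the generic chat prompt.
const textStream = await streamText(
  llamacpp
    .CompletionTextGenerator({
      promptTemplate: llamacpp.prompt.ChatML, // assumed facade member
      maxGenerationTokens: 512,
    })
    .withChatPrompt(),
  {
    system: "You are a concise assistant.",
    messages: [{ role: "user", content: "What is a GBNF grammar?" }],
  }
);

for await (const textPart of textStream) {
  process.stdout.write(textPart);
}
```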
package/model-provider/mistral/MistralChatModel.cjs
@@ -97,10 +97,10 @@ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
             schema: (0, ZodSchema_js_1.zodSchema)(mistralChatResponseSchema),
         }));
     }
-    processTextGenerationResponse(response) {
+    processTextGenerationResponse(rawResponse) {
         return {
-            response,
-            textGenerationResults: response.choices.map((choice) => ({
+            rawResponse,
+            textGenerationResults: rawResponse.choices.map((choice) => ({
                 text: choice.message.content,
                 finishReason: this.translateFinishReason(choice.finish_reason),
             })),
@@ -144,6 +144,9 @@ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
     withChatPrompt() {
         return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.chat)());
     }
+    withJsonOutput() {
+        return this;
+    }
     withPromptTemplate(promptTemplate) {
         return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this, // stop tokens are not supported by this model
package/model-provider/mistral/MistralChatModel.d.ts
@@ -59,7 +59,7 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
     }): Promise<RESULT>;
     get settingsForEvent(): Partial<MistralChatModelSettings>;
     doGenerateTexts(prompt: MistralChatPrompt, options: FunctionCallOptions): Promise<{
-        response: {
+        rawResponse: {
             object: string;
             model: string;
             usage: {
@@ -84,7 +84,7 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
         }[];
     }>;
     restoreGeneratedTexts(rawResponse: unknown): {
-        response: {
+        rawResponse: {
             object: string;
             model: string;
             usage: {
@@ -108,8 +108,8 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
             finishReason: TextGenerationFinishReason;
         }[];
     };
-    processTextGenerationResponse(response: MistralChatResponse): {
-        response: {
+    processTextGenerationResponse(rawResponse: MistralChatResponse): {
+        rawResponse: {
             object: string;
             model: string;
             usage: {
@@ -161,6 +161,7 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
      * Returns this model with a chat prompt template.
      */
     withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, MistralChatPrompt, MistralChatModelSettings, this>;
+    withJsonOutput(): this;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, MistralChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, MistralChatPrompt, MistralChatModelSettings, this>;
     withSettings(additionalSettings: Partial<MistralChatModelSettings>): this;
 }
package/model-provider/mistral/MistralChatModel.js
@@ -94,10 +94,10 @@ export class MistralChatModel extends AbstractModel {
             schema: zodSchema(mistralChatResponseSchema),
         }));
     }
-    processTextGenerationResponse(response) {
+    processTextGenerationResponse(rawResponse) {
         return {
-            response,
-            textGenerationResults: response.choices.map((choice) => ({
+            rawResponse,
+            textGenerationResults: rawResponse.choices.map((choice) => ({
                 text: choice.message.content,
                 finishReason: this.translateFinishReason(choice.finish_reason),
             })),
@@ -141,6 +141,9 @@ export class MistralChatModel extends AbstractModel {
     withChatPrompt() {
         return this.withPromptTemplate(chat());
     }
+    withJsonOutput() {
+        return this;
+    }
     withPromptTemplate(promptTemplate) {
         return new PromptTemplateTextStreamingModel({
             model: this, // stop tokens are not supported by this model
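Both the CommonJS and ESM builds gain the same no-op `withJsonOutput()`. It slots into the reworked structure-generation path (see the `jsonStructurePrompt` and `StructureFromText*` entries in the file list), which can now ask the underlying model for native JSON output; models without a JSON mode, like this one, simply return `this`. A sketch of that path under assumptions (0.1x positional `generateStructure` arguments, `jsonStructurePrompt.text()`, and `asStructureGenerationModel` as the wiring):

```ts
import {
  generateStructure,
  jsonStructurePrompt,
  zodSchema,
  mistral,
} from "modelfusion";
import { z } from "zod";

// Sketch under assumptions: the structure pipeline may call
// withJsonOutput() on the wrapped model; for MistralChatModel that is
// the no-op added in the hunks above.
const person = await generateStructure(
  mistral
    .ChatTextGenerator({ model: "mistral-small", temperature: 0 })
    .asStructureGenerationModel(jsonStructurePrompt.text()),
  zodSchema(z.object({ name: z.string(), age: z.number() })),
  "Generate a fictional person."
);
```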
package/model-provider/mistral/MistralTextEmbeddingModel.cjs
@@ -79,10 +79,10 @@ class MistralTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
         };
     }
     async doEmbedValues(texts, options) {
-        const response = await this.callAPI(texts, options);
+        const rawResponse = await this.callAPI(texts, options);
         return {
-            response,
-            embeddings: response.data.map((entry) => entry.embedding),
+            rawResponse,
+            embeddings: rawResponse.data.map((entry) => entry.embedding),
         };
     }
     withSettings(additionalSettings) {
package/model-provider/mistral/MistralTextEmbeddingModel.d.ts
@@ -30,7 +30,7 @@ export declare class MistralTextEmbeddingModel extends AbstractModel<MistralText
     callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<MistralTextEmbeddingResponse>;
     get settingsForEvent(): Partial<MistralTextEmbeddingModelSettings>;
     doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
-        response: {
+        rawResponse: {
             object: string;
             model: string;
             usage: {
package/model-provider/mistral/MistralTextEmbeddingModel.js
@@ -76,10 +76,10 @@ export class MistralTextEmbeddingModel extends AbstractModel {
         };
     }
     async doEmbedValues(texts, options) {
-        const response = await this.callAPI(texts, options);
+        const rawResponse = await this.callAPI(texts, options);
         return {
-            response,
-            embeddings: response.data.map((entry) => entry.embedding),
+            rawResponse,
+            embeddings: rawResponse.data.map((entry) => entry.embedding),
         };
     }
     withSettings(additionalSettings) {
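The embedding hunks repeat the mechanical `response` → `rawResponse` rename; callers see it through the `fullResponse: true` option. A sketch, assuming the 0.1x positional call shape of `embed`:

```ts
import { embed, mistral } from "modelfusion";

// Sketch under assumptions: with fullResponse: true, the raw Mistral
// embedding response is now exposed as `rawResponse` (previously
// `response`), alongside the extracted embedding.
const { embedding, rawResponse } = await embed(
  mistral.TextEmbedder({ model: "mistral-embed" }),
  "Text to embed.",
  { fullResponse: true }
);

console.log(embedding.length, rawResponse.usage);
```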
package/model-provider/ollama/OllamaChatModel.cjs
@@ -129,12 +129,12 @@ class OllamaChatModel extends AbstractModel_js_1.AbstractModel {
             schema: (0, ZodSchema_js_1.zodSchema)(ollamaChatResponseSchema),
         }));
     }
-    processTextGenerationResponse(response) {
+    processTextGenerationResponse(rawResponse) {
         return {
-            response,
+            rawResponse,
             textGenerationResults: [
                 {
-                    text: response.message.content,
+                    text: rawResponse.message.content,
                     finishReason: "unknown",
                 },
             ],
package/model-provider/ollama/OllamaChatModel.d.ts
@@ -39,7 +39,7 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
     }): Promise<RESPONSE>;
     get settingsForEvent(): Partial<OllamaChatModelSettings>;
     doGenerateTexts(prompt: OllamaChatPrompt, options: FunctionCallOptions): Promise<{
-        response: {
+        rawResponse: {
             model: string;
             message: {
                 role: string;
@@ -60,7 +60,7 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
         }[];
     }>;
     restoreGeneratedTexts(rawResponse: unknown): {
-        response: {
+        rawResponse: {
             model: string;
             message: {
                 role: string;
package/model-provider/ollama/OllamaChatModel.js
@@ -126,12 +126,12 @@ export class OllamaChatModel extends AbstractModel {
             schema: zodSchema(ollamaChatResponseSchema),
         }));
     }
-    processTextGenerationResponse(response) {
+    processTextGenerationResponse(rawResponse) {
         return {
-            response,
+            rawResponse,
             textGenerationResults: [
                 {
-                    text: response.message.content,
+                    text: rawResponse.message.content,
                     finishReason: "unknown",
                 },
             ],
package/model-provider/ollama/OllamaCompletionModel.cjs
@@ -130,12 +130,12 @@ class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
             schema: (0, ZodSchema_js_1.zodSchema)(ollamaCompletionResponseSchema),
         }));
     }
-    processTextGenerationResponse(response) {
+    processTextGenerationResponse(rawResponse) {
         return {
-            response,
+            rawResponse,
             textGenerationResults: [
                 {
-                    text: response.response,
+                    text: rawResponse.response,
                     finishReason: "unknown",
                 },
             ],
@@ -163,6 +163,9 @@ class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
             template: promptTemplate,
         });
     }
+    withJsonOutput() {
+        return this;
+    }
     withTextPrompt() {
         return this.withPromptTemplate({
             format(prompt) {
package/model-provider/ollama/OllamaCompletionModel.d.ts
@@ -52,10 +52,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
     }): Promise<RESPONSE>;
     get settingsForEvent(): Partial<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>;
     doGenerateTexts(prompt: OllamaCompletionPrompt, options: FunctionCallOptions): Promise<{
-        response: {
-            response: string;
+        rawResponse: {
             model: string;
             done: true;
+            response: string;
             created_at: string;
             total_duration: number;
             prompt_eval_count: number;
@@ -71,10 +71,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
         }[];
     }>;
    restoreGeneratedTexts(rawResponse: unknown): {
-        response: {
-            response: string;
+        rawResponse: {
             model: string;
             done: true;
+            response: string;
             created_at: string;
             total_duration: number;
             prompt_eval_count: number;
@@ -89,11 +89,11 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
             finishReason: "unknown";
         }[];
     };
-    processTextGenerationResponse(response: OllamaCompletionResponse): {
-        response: {
-            response: string;
+    processTextGenerationResponse(rawResponse: OllamaCompletionResponse): {
+        rawResponse: {
             model: string;
             done: true;
+            response: string;
             created_at: string;
             total_duration: number;
             prompt_eval_count: number;
@@ -109,9 +109,9 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
         }[];
     };
     doStreamText(prompt: OllamaCompletionPrompt, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
-        response: string;
         model: string;
         done: false;
+        response: string;
         created_at: string;
     } | {
         model: string;
@@ -130,6 +130,7 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
     extractTextDelta(delta: unknown): string | undefined;
     asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): TextGenerationToolCallModel<INPUT_PROMPT, OllamaCompletionPrompt, this>;
     asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): TextGenerationToolCallsModel<INPUT_PROMPT, OllamaCompletionPrompt, this>;
+    withJsonOutput(): this;
     withTextPrompt(): PromptTemplateTextStreamingModel<string, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
     /**
      * Maps the prompt for a text version of the Ollama completion prompt template (without image support).
@@ -151,9 +152,9 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
     eval_duration: z.ZodNumber;
     context: z.ZodOptional<z.ZodArray<z.ZodNumber, "many">>;
 }, "strip", z.ZodTypeAny, {
-    response: string;
     model: string;
     done: true;
+    response: string;
     created_at: string;
     total_duration: number;
     prompt_eval_count: number;
@@ -163,9 +164,9 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
     prompt_eval_duration?: number | undefined;
     context?: number[] | undefined;
 }, {
-    response: string;
     model: string;
     done: true;
+    response: string;
     created_at: string;
     total_duration: number;
     prompt_eval_count: number;
@@ -182,14 +183,14 @@ declare const ollamaCompletionStreamChunkSchema: z.ZodDiscriminatedUnion<"done",
     created_at: z.ZodString;
     response: z.ZodString;
 }, "strip", z.ZodTypeAny, {
-    response: string;
     model: string;
     done: false;
+    response: string;
     created_at: string;
 }, {
-    response: string;
     model: string;
     done: false;
+    response: string;
     created_at: string;
 }>, z.ZodObject<{
     done: z.ZodLiteral<true>;
@@ -247,9 +248,9 @@ export declare const OllamaCompletionResponseFormat: {
         requestBodyValues: unknown;
         response: Response;
     }) => Promise<{
-        response: string;
         model: string;
         done: true;
+        response: string;
         created_at: string;
         total_duration: number;
         prompt_eval_count: number;
@@ -269,9 +270,9 @@
     handler: ({ response }: {
         response: Response;
     }) => Promise<AsyncIterable<import("../../index.js").Delta<{
-        response: string;
         model: string;
         done: false;
+        response: string;
         created_at: string;
     } | {
         model: string;
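The remaining declaration hunks only move the generated `response: string` field below `model`/`done` and add the `withJsonOutput(): this` no-op, so runtime behavior is unchanged. A closing usage sketch, assuming the `ollama` facade names from the modelfusion 0.1x API; `withTextPrompt()` is taken from the hunks above:

```ts
import { ollama, streamText } from "modelfusion";

// Sketch under assumptions: stream a completion from a local Ollama
// server. The d.ts field reorder above is cosmetic; deltas still carry
// { model, done, response, created_at }.
const textStream = await streamText(
  ollama
    .CompletionTextGenerator({ model: "mistral", temperature: 0 })
    .withTextPrompt(),
  "Write a haiku about package diffs."
);

for await (const textPart of textStream) {
  process.stdout.write(textPart);
}
```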