modelfusion 0.105.0 → 0.107.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -0
- package/README.md +16 -59
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +10 -8
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +10 -8
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs +150 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.d.ts +62 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js +143 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.cjs +60 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.js +58 -0
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/index.cjs +2 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -0
- package/model-function/generate-text/prompt-template/index.js +1 -0
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +6 -6
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +3 -3
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.cjs → LlamaCppCompletionModel.cjs} +8 -8
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.d.ts → LlamaCppCompletionModel.d.ts} +49 -49
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.js → LlamaCppCompletionModel.js} +6 -6
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.cjs → LlamaCppCompletionModel.test.cjs} +3 -3
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.js → LlamaCppCompletionModel.test.js} +3 -3
- package/model-provider/llamacpp/LlamaCppFacade.cjs +2 -2
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -2
- package/model-provider/llamacpp/LlamaCppFacade.js +2 -2
- package/model-provider/llamacpp/index.cjs +1 -1
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/mistral/MistralChatModel.cjs +4 -4
- package/model-provider/mistral/MistralChatModel.d.ts +6 -6
- package/model-provider/mistral/MistralChatModel.js +1 -1
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/mistral/index.cjs +3 -3
- package/model-provider/mistral/index.d.ts +2 -2
- package/model-provider/mistral/index.js +2 -2
- package/model-provider/ollama/OllamaChatModel.d.ts +9 -8
- package/model-provider/ollama/OllamaChatModel.js +1 -1
- package/model-provider/ollama/OllamaCompletionModel.d.ts +2 -1
- package/model-provider/ollama/OllamaCompletionModel.js +1 -1
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +1 -7
- package/model-provider/ollama/OllamaCompletionModel.test.js +1 -7
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +8 -8
- package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.d.ts +6 -6
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +11 -11
- package/model-provider/openai/index.cjs +0 -1
- package/model-provider/openai/index.d.ts +0 -1
- package/model-provider/openai/index.js +0 -1
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +9 -20
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-call/index.cjs +1 -0
- package/tool/generate-tool-call/index.d.ts +1 -0
- package/tool/generate-tool-call/index.js +1 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.cjs +30 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.d.ts +5 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.js +27 -0
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/AssetStorage.js +0 -1
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/Flow.js +0 -1
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/FlowSchema.js +0 -1
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/Logger.js +0 -1
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/llamacpp/LlamaCppCompletionModel.test.d.ts} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.cjs → MistralChatPromptTemplate.cjs} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.d.ts → MistralChatPromptTemplate.d.ts} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.js → MistralChatPromptTemplate.js} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
- /package/{model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js} +0 -0
package/model-provider/llamacpp/index.d.ts

@@ -3,5 +3,5 @@ export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError, LlamaCppErrorData } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
-export * from "./LlamaCppTextGenerationModel.js";
+export * from "./LlamaCppCompletionModel.js";
 export * from "./LlamaCppTokenizer.js";

package/model-provider/llamacpp/index.js

@@ -3,5 +3,5 @@ export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
-export * from "./LlamaCppTextGenerationModel.js";
+export * from "./LlamaCppCompletionModel.js";
 export * from "./LlamaCppTokenizer.js";
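These two hunks are the re-export side of the `LlamaCppTextGenerationModel` → `LlamaCppCompletionModel` rename shown in the file list. A minimal sketch of the consumer-side change, assuming the class stays re-exported from the package root; the import path and the empty settings object are assumptions, not shown in this diff:

```ts
// 0.105.0 (assumed): import { LlamaCppTextGenerationModel } from "modelfusion";
// 0.107.0: the same model class under its new name.
import { LlamaCppCompletionModel } from "modelfusion";

// Settings omitted; see LlamaCppCompletionModel.d.ts for the full options.
const model = new LlamaCppCompletionModel({});
```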
package/model-provider/mistral/MistralChatModel.cjs

@@ -10,8 +10,8 @@ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/gene
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const createEventSourceResponseHandler_js_1 = require("../../util/streaming/createEventSourceResponseHandler.cjs");
 const MistralApiConfiguration_js_1 = require("./MistralApiConfiguration.cjs");
+const MistralChatPromptTemplate_js_1 = require("./MistralChatPromptTemplate.cjs");
 const MistralError_js_1 = require("./MistralError.cjs");
-const MistralPromptTemplate_js_1 = require("./MistralPromptTemplate.cjs");
 class MistralChatModel extends AbstractModel_js_1.AbstractModel {
     constructor(settings) {
         super({ settings });

@@ -118,19 +118,19 @@ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
      * Returns this model with a text prompt template.
      */
     withTextPrompt() {
-        return this.withPromptTemplate((0, MistralPromptTemplate_js_1.text)());
+        return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.text)());
     }
     /**
      * Returns this model with an instruction prompt template.
      */
     withInstructionPrompt() {
-        return this.withPromptTemplate((0, MistralPromptTemplate_js_1.instruction)());
+        return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.instruction)());
     }
     /**
      * Returns this model with a chat prompt template.
      */
     withChatPrompt() {
-        return this.withPromptTemplate((0, MistralPromptTemplate_js_1.chat)());
+        return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.chat)());
     }
     withPromptTemplate(promptTemplate) {
         return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
package/model-provider/mistral/MistralChatModel.d.ts

@@ -64,8 +64,8 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
         object: string;
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;

@@ -154,19 +154,19 @@ declare const mistralChatResponseSchema: z.ZodObject<{
         total_tokens: z.ZodNumber;
     }, "strip", z.ZodTypeAny, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }>;
 }, "strip", z.ZodTypeAny, {
     object: string;
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;

@@ -183,8 +183,8 @@ declare const mistralChatResponseSchema: z.ZodObject<{
     object: string;
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;

@@ -228,8 +228,8 @@ export declare const MistralChatResponseFormat: {
     object: string;
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
package/model-provider/mistral/MistralChatModel.js

@@ -7,8 +7,8 @@ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
 import { MistralApiConfiguration } from "./MistralApiConfiguration.js";
+import { chat, instruction, text } from "./MistralChatPromptTemplate.js";
 import { failedMistralCallResponseHandler } from "./MistralError.js";
-import { chat, instruction, text } from "./MistralPromptTemplate.js";
 export class MistralChatModel extends AbstractModel {
     constructor(settings) {
         super({ settings });
package/model-provider/mistral/MistralTextEmbeddingModel.d.ts

@@ -32,16 +32,16 @@ export declare class MistralTextEmbeddingModel extends AbstractModel<MistralText
     doEmbedValues(texts: string[], options?: FunctionOptions): Promise<{
         response: {
             object: string;
-            usage: {
-                prompt_tokens: number;
-                total_tokens: number;
-            };
-            model: string;
             data: {
                 object: string;
                 embedding: number[];
                 index: number;
             }[];
+            usage: {
+                prompt_tokens: number;
+                total_tokens: number;
+            };
+            model: string;
             id: string;
         };
         embeddings: number[][];

@@ -77,29 +77,29 @@ declare const MistralTextEmbeddingResponseSchema: z.ZodObject<{
     }>;
 }, "strip", z.ZodTypeAny, {
     object: string;
-    usage: {
-        prompt_tokens: number;
-        total_tokens: number;
-    };
-    model: string;
     data: {
         object: string;
         embedding: number[];
         index: number;
     }[];
-    id: string;
-}, {
-    object: string;
     usage: {
         prompt_tokens: number;
         total_tokens: number;
     };
     model: string;
+    id: string;
+}, {
+    object: string;
     data: {
         object: string;
         embedding: number[];
         index: number;
     }[];
+    usage: {
+        prompt_tokens: number;
+        total_tokens: number;
+    };
+    model: string;
     id: string;
 }>;
 export type MistralTextEmbeddingResponse = z.infer<typeof MistralTextEmbeddingResponseSchema>;
package/model-provider/mistral/index.cjs

@@ -26,9 +26,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MistralPrompt = exports.mistral = void 0;
+exports.mistral = exports.MistralChatPrompt = void 0;
 __exportStar(require("./MistralApiConfiguration.cjs"), exports);
+__exportStar(require("./MistralChatModel.cjs"), exports);
+exports.MistralChatPrompt = __importStar(require("./MistralChatPromptTemplate.cjs"));
 exports.mistral = __importStar(require("./MistralFacade.cjs"));
-exports.MistralPrompt = __importStar(require("./MistralPromptTemplate.cjs"));
 __exportStar(require("./MistralTextEmbeddingModel.cjs"), exports);
-__exportStar(require("./MistralChatModel.cjs"), exports);
package/model-provider/mistral/index.d.ts

@@ -1,6 +1,6 @@
 export * from "./MistralApiConfiguration.js";
+export * from "./MistralChatModel.js";
+export * as MistralChatPrompt from "./MistralChatPromptTemplate.js";
 export { MistralErrorData } from "./MistralError.js";
 export * as mistral from "./MistralFacade.js";
-export * as MistralPrompt from "./MistralPromptTemplate.js";
 export * from "./MistralTextEmbeddingModel.js";
-export * from "./MistralChatModel.js";

package/model-provider/mistral/index.js

@@ -1,5 +1,5 @@
 export * from "./MistralApiConfiguration.js";
+export * from "./MistralChatModel.js";
+export * as MistralChatPrompt from "./MistralChatPromptTemplate.js";
 export * as mistral from "./MistralFacade.js";
-export * as MistralPrompt from "./MistralPromptTemplate.js";
 export * from "./MistralTextEmbeddingModel.js";
-export * from "./MistralChatModel.js";
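Across the Mistral hunks, `MistralPromptTemplate` becomes `MistralChatPromptTemplate` and the namespace export `MistralPrompt` becomes `MistralChatPrompt`. A hedged sketch of the consumer-side change; the `mistral.ChatTextGenerator` facade call and the model id are assumptions, while the renamed namespace and the `withInstructionPrompt()` equivalence come from the hunks above:

```ts
import { mistral, MistralChatPrompt } from "modelfusion";

// Facade call and model id are assumed for illustration only.
const model = mistral.ChatTextGenerator({ model: "mistral-tiny" });

// 0.105.0: model.withPromptTemplate(MistralPrompt.instruction())
// 0.107.0: same template under the renamed namespace; equivalent to
// model.withInstructionPrompt() per the MistralChatModel.cjs hunk above.
const instructModel = model.withPromptTemplate(MistralChatPrompt.instruction());
```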
package/model-provider/ollama/OllamaChatModel.d.ts

@@ -8,7 +8,8 @@ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-
 import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js";
 import { OllamaTextGenerationSettings } from "./OllamaTextGenerationSettings.js";
 export type OllamaChatMessage = {
     role: "system" | "user" | "assistant";

@@ -38,11 +39,11 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
     get settingsForEvent(): Partial<OllamaChatModelSettings>;
     doGenerateTexts(prompt: OllamaChatPrompt, options?: FunctionOptions): Promise<{
         response: {
-            model: string;
             message: {
                 role: string;
                 content: string;
             };
+            model: string;
             done: true;
             created_at: string;
             total_duration: number;

@@ -58,11 +59,11 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
         }[];
     }>;
     doStreamText(prompt: OllamaChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
-        model: string;
         message: {
             role: string;
             content: string;
         };
+        model: string;
         done: false;
         created_at: string;
     } | {

@@ -115,11 +116,11 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
     eval_count: z.ZodNumber;
     eval_duration: z.ZodNumber;
 }, "strip", z.ZodTypeAny, {
-    model: string;
     message: {
         role: string;
         content: string;
     };
+    model: string;
     done: true;
     created_at: string;
     total_duration: number;

@@ -129,11 +130,11 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
     load_duration?: number | undefined;
     prompt_eval_duration?: number | undefined;
 }, {
-    model: string;
     message: {
         role: string;
         content: string;
     };
+    model: string;
     done: true;
     created_at: string;
     total_duration: number;

@@ -145,11 +146,11 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
 }>;
 export type OllamaChatResponse = z.infer<typeof ollamaChatResponseSchema>;
 declare const ollamaChatStreamChunkSchema: ZodSchema<{
-    model: string;
     message: {
         role: string;
         content: string;
     };
+    model: string;
     done: false;
     created_at: string;
 } | {

@@ -179,11 +180,11 @@ export declare const OllamaChatResponseFormat: {
     requestBodyValues: unknown;
     response: Response;
 }) => Promise<{
-    model: string;
     message: {
         role: string;
         content: string;
     };
+    model: string;
     done: true;
     created_at: string;
     total_duration: number;

@@ -203,11 +204,11 @@ export declare const OllamaChatResponseFormat: {
     handler: ({ response }: {
         response: Response;
     }) => Promise<AsyncIterable<import("../../index.js").Delta<{
-        model: string;
         message: {
             role: string;
             content: string;
         };
+        model: string;
         done: false;
         created_at: string;
     } | {
package/model-provider/ollama/OllamaChatModel.js

@@ -8,7 +8,7 @@ import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 import { createJsonStreamResponseHandler } from "../../util/streaming/createJsonStreamResponseHandler.js";
 import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
 import { chat, instruction, text } from "./OllamaChatPromptTemplate.js";
package/model-provider/ollama/OllamaCompletionModel.d.ts

@@ -8,7 +8,8 @@ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-
 import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js";
 import { OllamaTextGenerationSettings } from "./OllamaTextGenerationSettings.js";
 export interface OllamaCompletionPrompt {
     /**
package/model-provider/ollama/OllamaCompletionModel.js

@@ -8,7 +8,7 @@ import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 import { createJsonStreamResponseHandler } from "../../util/streaming/createJsonStreamResponseHandler.js";
 import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
 import { failedOllamaCallResponseHandler } from "./OllamaError.js";
package/model-provider/ollama/OllamaCompletionModel.test.cjs

@@ -132,13 +132,7 @@ describe("streamStructure", () => {
             raw: true,
         })
             .withTextPromptTemplate(TextPrompt.instruction())
-            .asStructureGenerationModel((0, jsonStructurePrompt_js_1.jsonStructurePrompt)((instruction, schema) => ({
-            system: "JSON schema: \n" +
-                JSON.stringify(schema.getJsonSchema()) +
-                "\n\n" +
-                "Respond only using JSON that matches the above schema.",
-            instruction,
-        }))), (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({ name: zod_1.z.string() })), "generate a name");
+            .asStructureGenerationModel(jsonStructurePrompt_js_1.jsonStructurePrompt.text()), (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({ name: zod_1.z.string() })), "generate a name");
         // note: space moved to last chunk bc of trimming
         expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
             { isComplete: false, value: {} },
package/model-provider/ollama/OllamaCompletionModel.test.js

@@ -107,13 +107,7 @@ describe("streamStructure", () => {
             raw: true,
         })
             .withTextPromptTemplate(TextPrompt.instruction())
-            .asStructureGenerationModel(jsonStructurePrompt((instruction, schema) => ({
-            system: "JSON schema: \n" +
-                JSON.stringify(schema.getJsonSchema()) +
-                "\n\n" +
-                "Respond only using JSON that matches the above schema.",
-            instruction,
-        }))), zodSchema(z.object({ name: z.string() })), "generate a name");
+            .asStructureGenerationModel(jsonStructurePrompt.text()), zodSchema(z.object({ name: z.string() })), "generate a name");
         // note: space moved to last chunk bc of trimming
         expect(await arrayFromAsync(stream)).toStrictEqual([
             { isComplete: false, value: {} },
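Both test hunks collapse a hand-written schema-injection prompt into the new `jsonStructurePrompt.text()` helper (see the `jsonStructurePrompt.*` entries in the file list). A sketch of the resulting pattern; `textModel` stands in for the Ollama completion model with a text prompt template that the tests construct:

```ts
import { jsonStructurePrompt, streamStructure, zodSchema } from "modelfusion";
import { z } from "zod";

async function run(textModel: any) {
  // textModel is assumed: a text-generation model with a text prompt
  // template already applied, as in the updated tests above.
  const structureModel = textModel.asStructureGenerationModel(
    jsonStructurePrompt.text() // now injects the JSON schema into the prompt
  );

  // Schema and task are passed at call time, exactly as in the tests.
  const stream = await streamStructure(
    structureModel,
    zodSchema(z.object({ name: z.string() })),
    "generate a name"
  );

  for await (const part of stream) {
    console.log(part); // e.g. { isComplete: false, value: {} }
  }
}
```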
package/model-provider/openai/AbstractOpenAIChatModel.d.ts

@@ -97,8 +97,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;

@@ -172,8 +172,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;

@@ -216,8 +216,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
         object: "chat.completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;

@@ -392,19 +392,19 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         total_tokens: z.ZodNumber;
     }, "strip", z.ZodTypeAny, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }>;
 }, "strip", z.ZodTypeAny, {
     object: "chat.completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;

@@ -435,8 +435,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     object: "chat.completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;

@@ -683,8 +683,8 @@ export declare const OpenAIChatResponseFormat: {
     object: "chat.completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
package/model-provider/openai/OpenAICompletionModel.d.ts

@@ -148,8 +148,8 @@ export declare class OpenAICompletionModel extends AbstractModel<OpenAICompletio
         object: "text_completion";
         usage: {
             prompt_tokens: number;
-            total_tokens: number;
             completion_tokens: number;
+            total_tokens: number;
         };
         model: string;
         id: string;

@@ -228,19 +228,19 @@ declare const OpenAICompletionResponseSchema: z.ZodObject<{
         total_tokens: z.ZodNumber;
     }, "strip", z.ZodTypeAny, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }, {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     }>;
 }, "strip", z.ZodTypeAny, {
     object: "text_completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;

@@ -256,8 +256,8 @@ declare const OpenAICompletionResponseSchema: z.ZodObject<{
     object: "text_completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;

@@ -285,8 +285,8 @@ export declare const OpenAITextResponseFormat: {
     object: "text_completion";
     usage: {
         prompt_tokens: number;
-        total_tokens: number;
         completion_tokens: number;
+        total_tokens: number;
     };
     model: string;
     id: string;
package/model-provider/openai/OpenAITextEmbeddingModel.d.ts

@@ -52,16 +52,16 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
     doEmbedValues(texts: string[], options?: FunctionOptions): Promise<{
         response: {
             object: "list";
-            usage: {
-                prompt_tokens: number;
-                total_tokens: number;
-            };
-            model: string;
             data: {
                 object: "embedding";
                 embedding: number[];
                 index: number;
             }[];
+            usage: {
+                prompt_tokens: number;
+                total_tokens: number;
+            };
+            model: string;
         };
         embeddings: number[][];
     }>;

@@ -95,28 +95,28 @@ declare const openAITextEmbeddingResponseSchema: z.ZodObject<{
     }>;
 }, "strip", z.ZodTypeAny, {
     object: "list";
-    usage: {
-        prompt_tokens: number;
-        total_tokens: number;
-    };
-    model: string;
     data: {
         object: "embedding";
         embedding: number[];
         index: number;
     }[];
-}, {
-    object: "list";
     usage: {
         prompt_tokens: number;
         total_tokens: number;
     };
     model: string;
+}, {
+    object: "list";
     data: {
         object: "embedding";
         embedding: number[];
         index: number;
     }[];
+    usage: {
+        prompt_tokens: number;
+        total_tokens: number;
+    };
+    model: string;
 }>;
 export type OpenAITextEmbeddingResponse = z.infer<typeof openAITextEmbeddingResponseSchema>;
 export {};
package/model-provider/openai/OpenAITranscriptionModel.d.ts

@@ -64,6 +64,9 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
     doTranscribe(data: OpenAITranscriptionInput, options?: FunctionOptions): Promise<{
         response: {
             text: string;
+            task: "transcribe";
+            language: string;
+            duration: number;
             segments: {
                 text: string;
                 id: number;

@@ -77,9 +80,6 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
                 no_speech_prob: number;
                 transient?: boolean | undefined;
             }[];
-            task: "transcribe";
-            language: string;
-            duration: number;
         };
         transcription: string;
     }>;

@@ -141,6 +141,9 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
     text: z.ZodString;
 }, "strip", z.ZodTypeAny, {
     text: string;
+    task: "transcribe";
+    language: string;
+    duration: number;
     segments: {
         text: string;
         id: number;

@@ -154,11 +157,11 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
         no_speech_prob: number;
         transient?: boolean | undefined;
     }[];
+}, {
+    text: string;
     task: "transcribe";
     language: string;
     duration: number;
-}, {
-    text: string;
     segments: {
         text: string;
         id: number;

@@ -172,9 +175,6 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
         no_speech_prob: number;
         transient?: boolean | undefined;
     }[];
-    task: "transcribe";
-    language: string;
-    duration: number;
 }>;
 export type OpenAITranscriptionVerboseJsonResponse = z.infer<typeof openAITranscriptionVerboseJsonSchema>;
 export type OpenAITranscriptionResponseFormatType<T> = {

@@ -192,6 +192,9 @@ export declare const OpenAITranscriptionResponseFormat: {
         type: "verbose_json";
         handler: ResponseHandler<{
             text: string;
+            task: "transcribe";
+            language: string;
+            duration: number;
             segments: {
                 text: string;
                 id: number;

@@ -205,9 +208,6 @@ export declare const OpenAITranscriptionResponseFormat: {
             no_speech_prob: number;
             transient?: boolean | undefined;
         }[];
-        task: "transcribe";
-        language: string;
-        duration: number;
     }>;
     };
     text: {
package/model-provider/openai/index.cjs

@@ -34,7 +34,6 @@ __exportStar(require("./OpenAIChatMessage.cjs"), exports);
 __exportStar(require("./OpenAIChatModel.cjs"), exports);
 exports.OpenAIChatPrompt = __importStar(require("./OpenAIChatPromptTemplate.cjs"));
 __exportStar(require("./OpenAICompletionModel.cjs"), exports);
-__exportStar(require("./OpenAICostCalculator.cjs"), exports);
 exports.openai = __importStar(require("./OpenAIFacade.cjs"));
 __exportStar(require("./OpenAIImageGenerationModel.cjs"), exports);
 __exportStar(require("./OpenAISpeechModel.cjs"), exports);
package/model-provider/openai/index.d.ts

@@ -5,7 +5,6 @@ export * from "./OpenAIChatMessage.js";
 export * from "./OpenAIChatModel.js";
 export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
 export * from "./OpenAICompletionModel.js";
-export * from "./OpenAICostCalculator.js";
 export { OpenAIErrorData } from "./OpenAIError.js";
 export * as openai from "./OpenAIFacade.js";
 export * from "./OpenAIImageGenerationModel.js";

package/model-provider/openai/index.js

@@ -5,7 +5,6 @@ export * from "./OpenAIChatMessage.js";
 export * from "./OpenAIChatModel.js";
 export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
 export * from "./OpenAICompletionModel.js";
-export * from "./OpenAICostCalculator.js";
 export * as openai from "./OpenAIFacade.js";
 export * from "./OpenAIImageGenerationModel.js";
 export * from "./OpenAISpeechModel.js";