modelfusion 0.104.0 → 0.106.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +8 -10
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
- package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +6 -3
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
- package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +49 -15
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +171 -20
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +47 -14
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
- package/model-provider/mistral/MistralPromptTemplate.js +9 -2
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +67 -14
- package/model-provider/ollama/OllamaChatModel.js +8 -44
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +67 -10
- package/model-provider/ollama/OllamaCompletionModel.js +24 -45
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +95 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +72 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +17 -17
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -7
- package/model-provider/openai/index.d.ts +5 -7
- package/model-provider/openai/index.js +5 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +13 -24
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js +1 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → llamacpp/LlamaCppTextGenerationModel.test.d.ts} +0 -0
- /package/{server/fastify/AssetStorage.js → model-provider/mistral/MistralChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Flow.js → model-provider/ollama/OllamaChatModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/{server/fastify/FlowSchema.js → model-provider/openai/OpenAIChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Logger.js → model-provider/openai/OpenAICompletionModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
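Taken together, the file list shows three themes in this release window: peripheral subsystems are deleted outright (`browser/`, `server/fastify/`, `cost/`, `guard/`, and `composed-function/summarize`); `model-provider/openai/chat/` is flattened into `model-provider/openai/` and the prompt-template module `Content` is renamed to `ContentPart`; and test infrastructure (`test/JsonTestServer`, `test/StreamingTestServer`, `test/arrayFromAsync`) plus generic streaming utilities under `util/streaming/` are added, replacing per-provider delta-queue code, as the Ollama hunks below illustrate.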
package/model-provider/mistral/MistralTextEmbeddingModel.d.ts:

@@ -32,16 +32,16 @@ export declare class MistralTextEmbeddingModel extends AbstractModel<MistralText
     doEmbedValues(texts: string[], options?: FunctionOptions): Promise<{
         response: {
             object: string;
-            usage: {
-                prompt_tokens: number;
-                total_tokens: number;
-            };
-            model: string;
             data: {
                 object: string;
                 embedding: number[];
                 index: number;
             }[];
+            usage: {
+                prompt_tokens: number;
+                total_tokens: number;
+            };
+            model: string;
             id: string;
         };
         embeddings: number[][];
@@ -77,29 +77,29 @@ declare const MistralTextEmbeddingResponseSchema: z.ZodObject<{
     }>;
 }, "strip", z.ZodTypeAny, {
     object: string;
-    usage: {
-        prompt_tokens: number;
-        total_tokens: number;
-    };
-    model: string;
     data: {
         object: string;
         embedding: number[];
         index: number;
     }[];
-    id: string;
-}, {
-    object: string;
     usage: {
         prompt_tokens: number;
         total_tokens: number;
     };
     model: string;
+    id: string;
+}, {
+    object: string;
     data: {
         object: string;
         embedding: number[];
         index: number;
     }[];
+    usage: {
+        prompt_tokens: number;
+        total_tokens: number;
+    };
+    model: string;
     id: string;
 }>;
 export type MistralTextEmbeddingResponse = z.infer<typeof MistralTextEmbeddingResponseSchema>;
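Both hunks above merely reorder the `usage` and `model` keys inside the inferred TypeScript object types; the underlying Zod schema fields and the wire format are unchanged, so this looks like regenerated declaration output (e.g. after a Zod or TypeScript upgrade) rather than an API change.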
package/model-provider/ollama/OllamaChatModel.cjs:

@@ -12,8 +12,7 @@ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/gene
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
 const TextGenerationToolCallsOrGenerateTextModel_js_1 = require("../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs");
-const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
-const parseJsonStream_js_1 = require("../../util/streaming/parseJsonStream.cjs");
+const createJsonStreamResponseHandler_js_1 = require("../../util/streaming/createJsonStreamResponseHandler.cjs");
 const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
 const OllamaChatPromptTemplate_js_1 = require("./OllamaChatPromptTemplate.cjs");
 const OllamaError_js_1 = require("./OllamaError.cjs");
@@ -133,6 +132,10 @@ class OllamaChatModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.OllamaChatResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.done === true ? undefined : chunk.message.content;
+    }
     asToolCallGenerationModel(promptTemplate) {
         return new TextGenerationToolCallModel_js_1.TextGenerationToolCallModel({
             model: this,
@@ -194,7 +197,7 @@ const ollamaChatResponseSchema = zod_1.z.object({
     eval_count: zod_1.z.number(),
     eval_duration: zod_1.z.number(),
 });
-const ollamaChatStreamSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.discriminatedUnion("done", [
+const ollamaChatStreamChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.discriminatedUnion("done", [
     zod_1.z.object({
         done: zod_1.z.literal(false),
         model: zod_1.z.string(),
@@ -216,45 +219,6 @@ const ollamaChatStreamSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.discriminate
         eval_duration: zod_1.z.number(),
     }),
 ]));
-async function createOllamaFullDeltaIterableQueue(stream) {
-    const queue = new AsyncQueue_js_1.AsyncQueue();
-    let accumulatedText = "";
-    // process the stream asynchonously (no 'await' on purpose):
-    (0, parseJsonStream_js_1.parseJsonStream)({
-        stream,
-        schema: ollamaChatStreamSchema,
-        process(event) {
-            if (event.done === true) {
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: true,
-                        delta: "",
-                    },
-                    valueDelta: "",
-                });
-            }
-            else {
-                const deltaText = event.message.content;
-                accumulatedText += deltaText;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: false,
-                        delta: deltaText,
-                    },
-                    valueDelta: deltaText,
-                });
-            }
-        },
-        onDone() {
-            queue.close();
-        },
-    });
-    return queue;
-}
 exports.OllamaChatResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -303,6 +267,6 @@ exports.OllamaChatResponseFormat = {
      */
     deltaIterable: {
         stream: true,
-        handler:
+        handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)(ollamaChatStreamChunkSchema),
     },
 };
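The hunks above replace the hand-rolled `createOllamaFullDeltaIterableQueue` with the new generic `createJsonStreamResponseHandler` (added under `util/streaming/` together with `parseJsonStreamAsAsyncIterable`, per the file list). Its implementation is not part of this excerpt; judging from the call site above and the handler type visible in the `.d.ts` hunks below, a minimal sketch could look like this — the body and the `parseJsonStreamAsAsyncIterable` signature are assumptions:

```ts
import { ZodSchema } from "../../core/schema/ZodSchema.js";
import { Delta } from "../../model-function/Delta.js";
import { parseJsonStreamAsAsyncIterable } from "./parseJsonStreamAsAsyncIterable.js";

// Sketch: wrap a newline-delimited JSON response body in an AsyncIterable
// of schema-validated Delta<CHUNK> values.
export const createJsonStreamResponseHandler =
  <CHUNK>(schema: ZodSchema<CHUNK>) =>
  ({ response }: { response: Response }): Promise<AsyncIterable<Delta<CHUNK>>> =>
    parseJsonStreamAsAsyncIterable({
      stream: response.body!, // assumed non-null for streaming responses
      schema,
    });
```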
package/model-provider/ollama/OllamaChatModel.d.ts:

@@ -2,13 +2,14 @@ import { z } from "zod";
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
+import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { Delta } from "../../model-function/Delta.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js";
 import { OllamaTextGenerationSettings } from "./OllamaTextGenerationSettings.js";
 export type OllamaChatMessage = {
     role: "system" | "user" | "assistant";
@@ -38,11 +39,11 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
     get settingsForEvent(): Partial<OllamaChatModelSettings>;
     doGenerateTexts(prompt: OllamaChatPrompt, options?: FunctionOptions): Promise<{
         response: {
-            model: string;
             message: {
                 role: string;
                 content: string;
             };
+            model: string;
             done: true;
             created_at: string;
             total_duration: number;
@@ -57,7 +58,26 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
             finishReason: "unknown";
         }[];
     }>;
-    doStreamText(prompt: OllamaChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<
+    doStreamText(prompt: OllamaChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+        message: {
+            role: string;
+            content: string;
+        };
+        model: string;
+        done: false;
+        created_at: string;
+    } | {
+        model: string;
+        done: true;
+        created_at: string;
+        total_duration: number;
+        prompt_eval_count: number;
+        eval_count: number;
+        eval_duration: number;
+        load_duration?: number | undefined;
+        prompt_eval_duration?: number | undefined;
+    }>>>;
+    extractTextDelta(delta: unknown): string | undefined;
     asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): TextGenerationToolCallModel<INPUT_PROMPT, OllamaChatPrompt, this>;
     asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsOrGenerateTextPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, OllamaChatPrompt, this>;
     /**
@@ -96,11 +116,11 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
     eval_count: z.ZodNumber;
     eval_duration: z.ZodNumber;
 }, "strip", z.ZodTypeAny, {
-    model: string;
     message: {
         role: string;
         content: string;
     };
+    model: string;
     done: true;
     created_at: string;
     total_duration: number;
@@ -110,11 +130,11 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
     load_duration?: number | undefined;
     prompt_eval_duration?: number | undefined;
 }, {
-    model: string;
     message: {
         role: string;
         content: string;
     };
+    model: string;
     done: true;
     created_at: string;
     total_duration: number;
@@ -125,11 +145,26 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
     prompt_eval_duration?: number | undefined;
 }>;
 export type OllamaChatResponse = z.infer<typeof ollamaChatResponseSchema>;
-
-
-
-
-};
+declare const ollamaChatStreamChunkSchema: ZodSchema<{
+    message: {
+        role: string;
+        content: string;
+    };
+    model: string;
+    done: false;
+    created_at: string;
+} | {
+    model: string;
+    done: true;
+    created_at: string;
+    total_duration: number;
+    prompt_eval_count: number;
+    eval_count: number;
+    eval_duration: number;
+    load_duration?: number | undefined;
+    prompt_eval_duration?: number | undefined;
+}>;
+export type OllamaChatStreamChunk = (typeof ollamaChatStreamChunkSchema)["_type"];
 export type OllamaChatResponseFormatType<T> = {
     stream: boolean;
     handler: ResponseHandler<T>;
@@ -145,11 +180,11 @@ export declare const OllamaChatResponseFormat: {
         requestBodyValues: unknown;
         response: Response;
     }) => Promise<{
-        model: string;
         message: {
             role: string;
             content: string;
         };
+        model: string;
         done: true;
         created_at: string;
         total_duration: number;
@@ -165,10 +200,28 @@ export declare const OllamaChatResponseFormat: {
      * of the response stream.
      */
    deltaIterable: {
-        stream:
+        stream: boolean;
        handler: ({ response }: {
            response: Response;
-        }) => Promise<AsyncIterable<Delta<
+        }) => Promise<AsyncIterable<import("../../index.js").Delta<{
+            message: {
+                role: string;
+                content: string;
+            };
+            model: string;
+            done: false;
+            created_at: string;
+        } | {
+            model: string;
+            done: true;
+            created_at: string;
+            total_duration: number;
+            prompt_eval_count: number;
+            eval_count: number;
+            eval_duration: number;
+            load_duration?: number | undefined;
+            prompt_eval_duration?: number | undefined;
+        }>>>;
    };
 };
 export {};
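Alongside the typed stream-chunk union, the class now declares `extractTextDelta(delta: unknown): string | undefined`. Only the compiled body appears in this diff; the TypeScript source presumably narrows via the exported `OllamaChatStreamChunk` type (the cast is an assumption):

```ts
import { OllamaChatStreamChunk } from "./OllamaChatModel.js";

// Presumed source of the compiled extractTextDelta shown in the .cjs/.js hunks.
function extractTextDelta(delta: unknown): string | undefined {
  const chunk = delta as OllamaChatStreamChunk;
  // The final chunk (done: true) carries timing metadata, not message text.
  return chunk.done === true ? undefined : chunk.message.content;
}
```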
package/model-provider/ollama/OllamaChatModel.js:

@@ -8,9 +8,8 @@ import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
-import { AsyncQueue } from "../../util/AsyncQueue.js";
-import { parseJsonStream } from "../../util/streaming/parseJsonStream.js";
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { createJsonStreamResponseHandler } from "../../util/streaming/createJsonStreamResponseHandler.js";
 import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
 import { chat, instruction, text } from "./OllamaChatPromptTemplate.js";
 import { failedOllamaCallResponseHandler } from "./OllamaError.js";
@@ -130,6 +129,10 @@ export class OllamaChatModel extends AbstractModel {
             responseFormat: OllamaChatResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.done === true ? undefined : chunk.message.content;
+    }
     asToolCallGenerationModel(promptTemplate) {
         return new TextGenerationToolCallModel({
             model: this,
@@ -190,7 +193,7 @@ const ollamaChatResponseSchema = z.object({
     eval_count: z.number(),
     eval_duration: z.number(),
 });
-const ollamaChatStreamSchema = new ZodSchema(z.discriminatedUnion("done", [
+const ollamaChatStreamChunkSchema = new ZodSchema(z.discriminatedUnion("done", [
     z.object({
         done: z.literal(false),
         model: z.string(),
@@ -212,45 +215,6 @@ const ollamaChatStreamSchema = new ZodSchema(z.discriminatedUnion("done", [
         eval_duration: z.number(),
     }),
 ]));
-async function createOllamaFullDeltaIterableQueue(stream) {
-    const queue = new AsyncQueue();
-    let accumulatedText = "";
-    // process the stream asynchonously (no 'await' on purpose):
-    parseJsonStream({
-        stream,
-        schema: ollamaChatStreamSchema,
-        process(event) {
-            if (event.done === true) {
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: true,
-                        delta: "",
-                    },
-                    valueDelta: "",
-                });
-            }
-            else {
-                const deltaText = event.message.content;
-                accumulatedText += deltaText;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: false,
-                        delta: deltaText,
-                    },
-                    valueDelta: deltaText,
-                });
-            }
-        },
-        onDone() {
-            queue.close();
-        },
-    });
-    return queue;
-}
 export const OllamaChatResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -299,6 +263,6 @@ export const OllamaChatResponseFormat = {
      */
     deltaIterable: {
         stream: true,
-        handler:
+        handler: createJsonStreamResponseHandler(ollamaChatStreamChunkSchema),
     },
 };
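The two new test files below exercise streaming end to end against a local mock of the Ollama chat endpoint. They rely on the new `test/` helpers; `arrayFromAsync` (9 lines in its `.js` build per the file list) is presumably a small stand-in for the `Array.fromAsync` proposal, along these lines:

```ts
// Collect an async iterable into an array so stream output can be asserted
// with a single toStrictEqual call. Sketch; the shipped helper is not shown.
export async function arrayFromAsync<T>(iterable: AsyncIterable<T>): Promise<T[]> {
  const result: T[] = [];
  for await (const item of iterable) {
    result.push(item);
  }
  return result;
}
```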
package/model-provider/ollama/OllamaChatModel.test.cjs (new file):

@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
+const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
+const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
+const OllamaChatModel_js_1 = require("./OllamaChatModel.cjs");
+describe("streamText", () => {
+    const server = new StreamingTestServer_js_1.StreamingTestServer("http://127.0.0.1:11434/api/chat");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:17.948267Z","message":{"role":"assistant","content":"Hello"},"done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:17.948267Z","message":{"role":"assistant","content":", "},"done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:17.948267Z","message":{"role":"assistant","content":"world!"},"done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:19.927399Z","message":{"role":"assistant","content":""},` +
+                `"done":true,"total_duration":4843619375,"load_duration":1101458,"prompt_eval_count":5,"prompt_eval_duration":199339000,` +
+                `"eval_count":317,"eval_duration":4639772000}\n`,
+        ];
+        const stream = await (0, streamText_js_1.streamText)(new OllamaChatModel_js_1.OllamaChatModel({ model: "mistral:text" }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
package/model-provider/ollama/OllamaChatModel.test.js (new file):

@@ -0,0 +1,25 @@
+import { streamText } from "../../model-function/generate-text/streamText.js";
+import { StreamingTestServer } from "../../test/StreamingTestServer.js";
+import { arrayFromAsync } from "../../test/arrayFromAsync.js";
+import { OllamaChatModel } from "./OllamaChatModel.js";
+describe("streamText", () => {
+    const server = new StreamingTestServer("http://127.0.0.1:11434/api/chat");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:17.948267Z","message":{"role":"assistant","content":"Hello"},"done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:17.948267Z","message":{"role":"assistant","content":", "},"done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:17.948267Z","message":{"role":"assistant","content":"world!"},"done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:49:19.927399Z","message":{"role":"assistant","content":""},` +
+                `"done":true,"total_duration":4843619375,"load_duration":1101458,"prompt_eval_count":5,"prompt_eval_duration":199339000,` +
+                `"eval_count":317,"eval_duration":4639772000}\n`,
+        ];
+        const stream = await streamText(new OllamaChatModel({ model: "mistral:text" }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await arrayFromAsync(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
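Both tests feed the chunks `"Hello"`, `", "`, `"world!"` but assert `["Hello", ",", " world!"]`. Per the inline comment, `streamText` now appears to hold back trailing whitespace from each emitted delta and prepend it to the next one, so whitespace at the very start or end of the stream can be trimmed without losing interior spaces.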
package/model-provider/ollama/OllamaChatPromptTemplate.cjs:

@@ -1,6 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat = exports.instruction = exports.text = exports.identity = void 0;
+const ContentPart_js_1 = require("../../model-function/generate-text/prompt-template/ContentPart.cjs");
+const InvalidPromptError_js_1 = require("../../model-function/generate-text/prompt-template/InvalidPromptError.cjs");
 /**
  * OllamaChatPrompt identity chat format.
  */
@@ -26,9 +28,15 @@ function instruction() {
         format(prompt) {
             const messages = [];
             if (prompt.system != null) {
-                messages.push({
+                messages.push({
+                    role: "system",
+                    content: prompt.system,
+                });
             }
-            messages.push({
+            messages.push({
+                role: "user",
+                ...extractUserContent(prompt.instruction),
+            });
             return messages;
         },
         stopSequences: [],
@@ -46,7 +54,29 @@ function chat() {
                 messages.push({ role: "system", content: prompt.system });
             }
             for (const { role, content } of prompt.messages) {
-
+                switch (role) {
+                    case "user": {
+                        messages.push({
+                            role: "user",
+                            ...extractUserContent(content),
+                        });
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push({
+                            role: "assistant",
+                            content: (0, ContentPart_js_1.validateContentIsString)(content, prompt),
+                        });
+                        break;
+                    }
+                    case "tool": {
+                        throw new InvalidPromptError_js_1.InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
             }
             return messages;
         },
@@ -54,7 +84,7 @@ function chat() {
     };
 }
 exports.chat = chat;
-function
+function extractUserContent(input) {
     if (typeof input === "string") {
         return { content: input, images: undefined };
     }
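`validateContentIsString` comes from the renamed `ContentPart` module (`Content.* → ContentPart.*` in the file list, plus a new 30-line `ContentPart.d.ts`). Its body is not in this excerpt; given how it is called above (assistant content must be plain text, anything else is rejected with `InvalidPromptError`), a plausible sketch, with the part type names assumed:

```ts
import { InvalidPromptError } from "./InvalidPromptError.js";

// Assumed part types; the real ones live in the new ContentPart module.
export type TextPart = { type: "text"; text: string };
export type ImagePart = { type: "image"; base64Image: string; mimeType?: string };

// Reject multi-modal content where only a plain string is supported.
export function validateContentIsString(
  content: string | Array<TextPart | ImagePart>,
  prompt: unknown
): string {
  if (typeof content !== "string") {
    throw new InvalidPromptError(
      "Only string content is supported for this message role.",
      prompt
    );
  }
  return content;
}
```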
package/model-provider/ollama/OllamaChatPromptTemplate.js:

@@ -1,3 +1,5 @@
+import { validateContentIsString } from "../../model-function/generate-text/prompt-template/ContentPart.js";
+import { InvalidPromptError } from "../../model-function/generate-text/prompt-template/InvalidPromptError.js";
 /**
  * OllamaChatPrompt identity chat format.
  */
@@ -21,9 +23,15 @@ export function instruction() {
         format(prompt) {
             const messages = [];
             if (prompt.system != null) {
-                messages.push({
+                messages.push({
+                    role: "system",
+                    content: prompt.system,
+                });
             }
-            messages.push({
+            messages.push({
+                role: "user",
+                ...extractUserContent(prompt.instruction),
+            });
             return messages;
         },
         stopSequences: [],
@@ -40,14 +48,36 @@ export function chat() {
                 messages.push({ role: "system", content: prompt.system });
             }
             for (const { role, content } of prompt.messages) {
-
+                switch (role) {
+                    case "user": {
+                        messages.push({
+                            role: "user",
+                            ...extractUserContent(content),
+                        });
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push({
+                            role: "assistant",
+                            content: validateContentIsString(content, prompt),
+                        });
+                        break;
+                    }
+                    case "tool": {
+                        throw new InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
             }
             return messages;
         },
         stopSequences: [],
     };
 }
-function
+function extractUserContent(input) {
     if (typeof input === "string") {
         return { content: input, images: undefined };
     }
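The section is cut off inside `extractUserContent`; only the string branch is visible. Since Ollama's chat API takes images as a separate base64 array next to `content`, the remainder plausibly splits multi-modal user content like this (the loop and field names are assumptions):

```ts
type TextPart = { type: "text"; text: string };
type ImagePart = { type: "image"; base64Image: string };

function extractUserContent(input: string | Array<TextPart | ImagePart>) {
  if (typeof input === "string") {
    return { content: input, images: undefined };
  }
  // Assumed continuation: concatenate text parts, collect base64 images
  // into the separate `images` array that the Ollama chat API expects.
  let content = "";
  const images: string[] = [];
  for (const part of input) {
    if (part.type === "text") {
      content += part.text;
    } else {
      images.push(part.base64Image);
    }
  }
  return { content, images };
}
```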