modelfusion 0.104.0 → 0.106.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +8 -10
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
- package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +6 -3
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
- package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +49 -15
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +171 -20
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +47 -14
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
- package/model-provider/mistral/MistralPromptTemplate.js +9 -2
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +67 -14
- package/model-provider/ollama/OllamaChatModel.js +8 -44
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +67 -10
- package/model-provider/ollama/OllamaCompletionModel.js +24 -45
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +95 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +72 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +17 -17
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -7
- package/model-provider/openai/index.d.ts +5 -7
- package/model-provider/openai/index.js +5 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +13 -24
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js +1 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → llamacpp/LlamaCppTextGenerationModel.test.d.ts} +0 -0
- /package/{server/fastify/AssetStorage.js → model-provider/mistral/MistralChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Flow.js → model-provider/ollama/OllamaChatModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/{server/fastify/FlowSchema.js → model-provider/openai/OpenAIChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Logger.js → model-provider/openai/OpenAICompletionModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
@@ -64,10 +64,13 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
|
|
64
64
|
doTranscribe(data: OpenAITranscriptionInput, options?: FunctionOptions): Promise<{
|
65
65
|
response: {
|
66
66
|
text: string;
|
67
|
+
task: "transcribe";
|
68
|
+
language: string;
|
69
|
+
duration: number;
|
67
70
|
segments: {
|
68
71
|
text: string;
|
69
|
-
temperature: number;
|
70
72
|
id: number;
|
73
|
+
temperature: number;
|
71
74
|
tokens: number[];
|
72
75
|
seek: number;
|
73
76
|
start: number;
|
@@ -77,9 +80,6 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
|
|
77
80
|
no_speech_prob: number;
|
78
81
|
transient?: boolean | undefined;
|
79
82
|
}[];
|
80
|
-
task: "transcribe";
|
81
|
-
language: string;
|
82
|
-
duration: number;
|
83
83
|
};
|
84
84
|
transcription: string;
|
85
85
|
}>;
|
@@ -115,8 +115,8 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
|
|
115
115
|
transient: z.ZodOptional<z.ZodBoolean>;
|
116
116
|
}, "strip", z.ZodTypeAny, {
|
117
117
|
text: string;
|
118
|
-
temperature: number;
|
119
118
|
id: number;
|
119
|
+
temperature: number;
|
120
120
|
tokens: number[];
|
121
121
|
seek: number;
|
122
122
|
start: number;
|
@@ -127,8 +127,8 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
|
|
127
127
|
transient?: boolean | undefined;
|
128
128
|
}, {
|
129
129
|
text: string;
|
130
|
-
temperature: number;
|
131
130
|
id: number;
|
131
|
+
temperature: number;
|
132
132
|
tokens: number[];
|
133
133
|
seek: number;
|
134
134
|
start: number;
|
@@ -141,10 +141,13 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
|
|
141
141
|
text: z.ZodString;
|
142
142
|
}, "strip", z.ZodTypeAny, {
|
143
143
|
text: string;
|
144
|
+
task: "transcribe";
|
145
|
+
language: string;
|
146
|
+
duration: number;
|
144
147
|
segments: {
|
145
148
|
text: string;
|
146
|
-
temperature: number;
|
147
149
|
id: number;
|
150
|
+
temperature: number;
|
148
151
|
tokens: number[];
|
149
152
|
seek: number;
|
150
153
|
start: number;
|
@@ -154,15 +157,15 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
|
|
154
157
|
no_speech_prob: number;
|
155
158
|
transient?: boolean | undefined;
|
156
159
|
}[];
|
160
|
+
}, {
|
161
|
+
text: string;
|
157
162
|
task: "transcribe";
|
158
163
|
language: string;
|
159
164
|
duration: number;
|
160
|
-
}, {
|
161
|
-
text: string;
|
162
165
|
segments: {
|
163
166
|
text: string;
|
164
|
-
temperature: number;
|
165
167
|
id: number;
|
168
|
+
temperature: number;
|
166
169
|
tokens: number[];
|
167
170
|
seek: number;
|
168
171
|
start: number;
|
@@ -172,9 +175,6 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
|
|
172
175
|
no_speech_prob: number;
|
173
176
|
transient?: boolean | undefined;
|
174
177
|
}[];
|
175
|
-
task: "transcribe";
|
176
|
-
language: string;
|
177
|
-
duration: number;
|
178
178
|
}>;
|
179
179
|
export type OpenAITranscriptionVerboseJsonResponse = z.infer<typeof openAITranscriptionVerboseJsonSchema>;
|
180
180
|
export type OpenAITranscriptionResponseFormatType<T> = {
|
@@ -192,10 +192,13 @@ export declare const OpenAITranscriptionResponseFormat: {
|
|
192
192
|
type: "verbose_json";
|
193
193
|
handler: ResponseHandler<{
|
194
194
|
text: string;
|
195
|
+
task: "transcribe";
|
196
|
+
language: string;
|
197
|
+
duration: number;
|
195
198
|
segments: {
|
196
199
|
text: string;
|
197
|
-
temperature: number;
|
198
200
|
id: number;
|
201
|
+
temperature: number;
|
199
202
|
tokens: number[];
|
200
203
|
seek: number;
|
201
204
|
start: number;
|
@@ -205,9 +208,6 @@ export declare const OpenAITranscriptionResponseFormat: {
|
|
205
208
|
no_speech_prob: number;
|
206
209
|
transient?: boolean | undefined;
|
207
210
|
}[];
|
208
|
-
task: "transcribe";
|
209
|
-
language: string;
|
210
|
-
duration: number;
|
211
211
|
}>;
|
212
212
|
};
|
213
213
|
text: {
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { FullTokenizer } from "../../model-function/tokenize-text/Tokenizer.js";
|
2
2
|
import { OpenAITextEmbeddingModelType } from "./OpenAITextEmbeddingModel.js";
|
3
3
|
import { OpenAICompletionBaseModelType } from "./OpenAICompletionModel.js";
|
4
|
-
import { OpenAIChatBaseModelType } from "./chat/OpenAIChatModel.js";
|
4
|
+
import { OpenAIChatBaseModelType } from "./OpenAIChatModel.js";
|
5
5
|
export type TikTokenTokenizerSettings = {
|
6
6
|
model: OpenAIChatBaseModelType | OpenAICompletionBaseModelType | OpenAITextEmbeddingModelType;
|
7
7
|
};
|
@@ -1,8 +1,8 @@
|
|
1
1
|
"use strict";
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
3
3
|
exports.countOpenAIChatPromptTokens = exports.countOpenAIChatMessageTokens = exports.OPENAI_CHAT_MESSAGE_BASE_TOKEN_COUNT = exports.OPENAI_CHAT_PROMPT_BASE_TOKEN_COUNT = void 0;
|
4
|
-
const countTokens_js_1 = require("../../../model-function/tokenize-text/countTokens.cjs");
|
5
|
-
const TikTokenTokenizer_js_1 = require("../TikTokenTokenizer.cjs");
|
4
|
+
const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
|
5
|
+
const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
|
6
6
|
const OpenAIChatModel_js_1 = require("./OpenAIChatModel.cjs");
|
7
7
|
/**
|
8
8
|
* Prompt tokens that are included automatically for every full
|
@@ -1,5 +1,5 @@
|
|
1
|
-
import { countTokens } from "../../../model-function/tokenize-text/countTokens.js";
|
2
|
-
import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
|
1
|
+
import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
|
2
|
+
import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
|
3
3
|
import { getOpenAIChatModelInformation, } from "./OpenAIChatModel.js";
|
4
4
|
/**
|
5
5
|
* Prompt tokens that are included automatically for every full
|
@@ -26,19 +26,18 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|
26
26
|
return result;
|
27
27
|
};
|
28
28
|
Object.defineProperty(exports, "__esModule", { value: true });
|
29
|
-
exports.OpenAIChatPrompt = exports.openai = void 0;
|
29
|
+
exports.openai = exports.OpenAIChatPrompt = void 0;
|
30
|
+
__exportStar(require("./AbstractOpenAIChatModel.cjs"), exports);
|
30
31
|
__exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
|
31
32
|
__exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
|
33
|
+
__exportStar(require("./OpenAIChatMessage.cjs"), exports);
|
34
|
+
__exportStar(require("./OpenAIChatModel.cjs"), exports);
|
35
|
+
exports.OpenAIChatPrompt = __importStar(require("./OpenAIChatPromptTemplate.cjs"));
|
32
36
|
__exportStar(require("./OpenAICompletionModel.cjs"), exports);
|
33
|
-
__exportStar(require("./OpenAICostCalculator.cjs"), exports);
|
34
37
|
exports.openai = __importStar(require("./OpenAIFacade.cjs"));
|
35
38
|
__exportStar(require("./OpenAIImageGenerationModel.cjs"), exports);
|
36
39
|
__exportStar(require("./OpenAISpeechModel.cjs"), exports);
|
37
40
|
__exportStar(require("./OpenAITextEmbeddingModel.cjs"), exports);
|
38
41
|
__exportStar(require("./OpenAITranscriptionModel.cjs"), exports);
|
39
42
|
__exportStar(require("./TikTokenTokenizer.cjs"), exports);
|
40
|
-
__exportStar(require("./chat/AbstractOpenAIChatModel.cjs"), exports);
|
41
|
-
__exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
|
42
|
-
__exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
|
43
|
-
exports.OpenAIChatPrompt = __importStar(require("./chat/OpenAIChatPromptTemplate.cjs"));
|
44
|
-
__exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
|
43
|
+
__exportStar(require("./countOpenAIChatMessageTokens.cjs"), exports);
|
@@ -1,7 +1,10 @@
|
|
1
|
+
export * from "./AbstractOpenAIChatModel.js";
|
1
2
|
export * from "./AzureOpenAIApiConfiguration.js";
|
2
3
|
export * from "./OpenAIApiConfiguration.js";
|
4
|
+
export * from "./OpenAIChatMessage.js";
|
5
|
+
export * from "./OpenAIChatModel.js";
|
6
|
+
export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
|
3
7
|
export * from "./OpenAICompletionModel.js";
|
4
|
-
export * from "./OpenAICostCalculator.js";
|
5
8
|
export { OpenAIErrorData } from "./OpenAIError.js";
|
6
9
|
export * as openai from "./OpenAIFacade.js";
|
7
10
|
export * from "./OpenAIImageGenerationModel.js";
|
@@ -9,9 +12,4 @@ export * from "./OpenAISpeechModel.js";
|
|
9
12
|
export * from "./OpenAITextEmbeddingModel.js";
|
10
13
|
export * from "./OpenAITranscriptionModel.js";
|
11
14
|
export * from "./TikTokenTokenizer.js";
|
12
|
-
export * from "./chat/AbstractOpenAIChatModel.js";
|
13
|
-
export * from "./chat/OpenAIChatMessage.js";
|
14
|
-
export * from "./chat/OpenAIChatModel.js";
|
15
|
-
export * as OpenAIChatPrompt from "./chat/OpenAIChatPromptTemplate.js";
|
16
|
-
export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
|
17
|
-
export * from "./chat/countOpenAIChatMessageTokens.js";
|
15
|
+
export * from "./countOpenAIChatMessageTokens.js";
|
@@ -1,15 +1,14 @@
|
|
1
|
+
export * from "./AbstractOpenAIChatModel.js";
|
1
2
|
export * from "./AzureOpenAIApiConfiguration.js";
|
2
3
|
export * from "./OpenAIApiConfiguration.js";
|
4
|
+
export * from "./OpenAIChatMessage.js";
|
5
|
+
export * from "./OpenAIChatModel.js";
|
6
|
+
export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
|
3
7
|
export * from "./OpenAICompletionModel.js";
|
4
|
-
export * from "./OpenAICostCalculator.js";
|
5
8
|
export * as openai from "./OpenAIFacade.js";
|
6
9
|
export * from "./OpenAIImageGenerationModel.js";
|
7
10
|
export * from "./OpenAISpeechModel.js";
|
8
11
|
export * from "./OpenAITextEmbeddingModel.js";
|
9
12
|
export * from "./OpenAITranscriptionModel.js";
|
10
13
|
export * from "./TikTokenTokenizer.js";
|
11
|
-
export * from "./chat/AbstractOpenAIChatModel.js";
|
12
|
-
export * from "./chat/OpenAIChatMessage.js";
|
13
|
-
export * from "./chat/OpenAIChatModel.js";
|
14
|
-
export * as OpenAIChatPrompt from "./chat/OpenAIChatPromptTemplate.js";
|
15
|
-
export * from "./chat/countOpenAIChatMessageTokens.js";
|
14
|
+
export * from "./countOpenAIChatMessageTokens.js";
|
@@ -2,10 +2,10 @@
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
3
3
|
exports.OpenAICompatibleChatModel = void 0;
|
4
4
|
const StructureFromTextStreamingModel_js_1 = require("../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
|
5
|
-
const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
|
5
|
+
const PromptTemplateFullTextModel_js_1 = require("../../model-function/generate-text/PromptTemplateFullTextModel.cjs");
|
6
6
|
const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
|
7
|
-
const AbstractOpenAIChatModel_js_1 = require("../openai/chat/AbstractOpenAIChatModel.cjs");
|
8
|
-
const OpenAIChatPromptTemplate_js_1 = require("../openai/chat/OpenAIChatPromptTemplate.cjs");
|
7
|
+
const AbstractOpenAIChatModel_js_1 = require("../openai/AbstractOpenAIChatModel.cjs");
|
8
|
+
const OpenAIChatPromptTemplate_js_1 = require("../openai/OpenAIChatPromptTemplate.cjs");
|
9
9
|
/**
|
10
10
|
* Create a text generation model that calls an API that is compatible with OpenAI's chat API.
|
11
11
|
*
|
@@ -82,7 +82,7 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
|
|
82
82
|
return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
|
83
83
|
}
|
84
84
|
withPromptTemplate(promptTemplate) {
|
85
|
-
return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
|
85
|
+
return new PromptTemplateFullTextModel_js_1.PromptTemplateFullTextModel({
|
86
86
|
model: this.withSettings({
|
87
87
|
stopSequences: [
|
88
88
|
...(this.settings.stopSequences ?? []),
|
@@ -1,11 +1,11 @@
|
|
1
1
|
import { StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
|
2
2
|
import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
|
3
|
-
import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
|
3
|
+
import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
|
4
4
|
import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
|
5
5
|
import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
|
6
6
|
import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
|
7
7
|
import { ToolCallsOrTextGenerationModel } from "../../tool/generate-tool-calls-or-text/ToolCallsOrTextGenerationModel.js";
|
8
|
-
import { AbstractOpenAIChatCallSettings, AbstractOpenAIChatModel, OpenAIChatPrompt } from "../openai/chat/AbstractOpenAIChatModel.js";
|
8
|
+
import { AbstractOpenAIChatCallSettings, AbstractOpenAIChatModel, OpenAIChatPrompt } from "../openai/AbstractOpenAIChatModel.js";
|
9
9
|
export type OpenAICompatibleProviderName = `openaicompatible` | `openaicompatible-${string}`;
|
10
10
|
export interface OpenAICompatibleChatSettings extends TextGenerationModelSettings, Omit<AbstractOpenAIChatCallSettings, "stop" | "maxTokens"> {
|
11
11
|
provider?: OpenAICompatibleProviderName;
|
@@ -31,15 +31,15 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
|
|
31
31
|
/**
|
32
32
|
* Returns this model with a text prompt template.
|
33
33
|
*/
|
34
|
-
withTextPrompt():
|
34
|
+
withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
|
35
35
|
/**
|
36
36
|
* Returns this model with an instruction prompt template.
|
37
37
|
*/
|
38
|
-
withInstructionPrompt():
|
38
|
+
withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
|
39
39
|
/**
|
40
40
|
* Returns this model with a chat prompt template.
|
41
41
|
*/
|
42
|
-
withChatPrompt():
|
43
|
-
withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>):
|
42
|
+
withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
|
43
|
+
withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
|
44
44
|
withSettings(additionalSettings: Partial<OpenAICompatibleChatSettings>): this;
|
45
45
|
}
|
@@ -1,8 +1,8 @@
|
|
1
1
|
import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
|
2
|
-
import {
|
2
|
+
import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
|
3
3
|
import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
|
4
|
-
import { AbstractOpenAIChatModel, } from "../openai/
|
5
|
-
import { chat, instruction, text
|
4
|
+
import { AbstractOpenAIChatModel, } from "../openai/AbstractOpenAIChatModel.js";
|
5
|
+
import { chat, instruction, text } from "../openai/OpenAIChatPromptTemplate.js";
|
6
6
|
/**
|
7
7
|
* Create a text generation model that calls an API that is compatible with OpenAI's chat API.
|
8
8
|
*
|
@@ -79,7 +79,7 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
|
|
79
79
|
return this.withPromptTemplate(chat());
|
80
80
|
}
|
81
81
|
withPromptTemplate(promptTemplate) {
|
82
|
-
return new
|
82
|
+
return new PromptTemplateFullTextModel({
|
83
83
|
model: this.withSettings({
|
84
84
|
stopSequences: [
|
85
85
|
...(this.settings.stopSequences ?? []),
|
@@ -69,8 +69,8 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
|
|
69
69
|
doGenerateImages(prompt: StabilityImageGenerationPrompt, options?: FunctionOptions): Promise<{
|
70
70
|
response: {
|
71
71
|
artifacts: {
|
72
|
-
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
73
72
|
base64: string;
|
73
|
+
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
74
74
|
seed: number;
|
75
75
|
}[];
|
76
76
|
};
|
@@ -86,24 +86,24 @@ declare const stabilityImageGenerationResponseSchema: z.ZodObject<{
|
|
86
86
|
seed: z.ZodNumber;
|
87
87
|
finishReason: z.ZodEnum<["SUCCESS", "ERROR", "CONTENT_FILTERED"]>;
|
88
88
|
}, "strip", z.ZodTypeAny, {
|
89
|
-
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
90
89
|
base64: string;
|
90
|
+
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
91
91
|
seed: number;
|
92
92
|
}, {
|
93
|
-
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
94
93
|
base64: string;
|
94
|
+
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
95
95
|
seed: number;
|
96
96
|
}>, "many">;
|
97
97
|
}, "strip", z.ZodTypeAny, {
|
98
98
|
artifacts: {
|
99
|
-
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
100
99
|
base64: string;
|
100
|
+
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
101
101
|
seed: number;
|
102
102
|
}[];
|
103
103
|
}, {
|
104
104
|
artifacts: {
|
105
|
-
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
106
105
|
base64: string;
|
106
|
+
finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
|
107
107
|
seed: number;
|
108
108
|
}[];
|
109
109
|
}>;
|
package/package.json
CHANGED
@@ -1,20 +1,19 @@
|
|
1
1
|
{
|
2
2
|
"name": "modelfusion",
|
3
3
|
"description": "The TypeScript library for building multi-modal AI applications.",
|
4
|
-
"version": "0.
|
4
|
+
"version": "0.106.0",
|
5
5
|
"author": "Lars Grammel",
|
6
6
|
"license": "MIT",
|
7
7
|
"keywords": [
|
8
|
+
"ai",
|
8
9
|
"llm",
|
10
|
+
"multimodal",
|
9
11
|
"embedding",
|
10
12
|
"openai",
|
11
|
-
"huggingface",
|
12
|
-
"gpt-3",
|
13
13
|
"gpt-4",
|
14
|
-
"
|
15
|
-
"
|
16
|
-
"
|
17
|
-
"multimodal"
|
14
|
+
"ollama",
|
15
|
+
"llamacpp",
|
16
|
+
"whisper"
|
18
17
|
],
|
19
18
|
"homepage": "https://modelfusion.dev/",
|
20
19
|
"repository": {
|
@@ -47,30 +46,21 @@
|
|
47
46
|
"types": "./extension/index.d.ts",
|
48
47
|
"import": "./extension/index.js",
|
49
48
|
"require": "./extension/index.cjs"
|
50
|
-
},
|
51
|
-
"./browser": {
|
52
|
-
"types": "./browser/index.d.ts",
|
53
|
-
"import": "./browser/index.js",
|
54
|
-
"require": "./browser/index.cjs"
|
55
|
-
},
|
56
|
-
"./fastify-server": {
|
57
|
-
"types": "./server/fastify/index.d.ts",
|
58
|
-
"import": "./server/fastify/index.js",
|
59
|
-
"require": "./server/fastify/index.cjs"
|
60
49
|
}
|
61
50
|
},
|
62
51
|
"scripts": {
|
63
52
|
"lint": "eslint --ext .ts src",
|
64
|
-
"clean": "rimraf build dist .turbo",
|
53
|
+
"clean": "rimraf build dist .turbo node_modules",
|
54
|
+
"clean:build": "rimraf build dist",
|
65
55
|
"build": "pnpm build:esm && pnpm build:cjs && pnpm build:copy-files",
|
66
56
|
"build:esm": "tsc --outDir dist/",
|
67
57
|
"build:cjs": "tsc --outDir build/cjs/ -p tsconfig.cjs.json && node bin/prepare-cjs.js",
|
68
58
|
"build:copy-files": "copyfiles --flat package.json ../../README.md ../../LICENSE ../../CHANGELOG.md dist",
|
69
59
|
"test": "vitest --config vitest.config.js --run src",
|
70
|
-
"test:watch": "vitest watch--config vitest.config.js",
|
71
|
-
"test:coverage": "vitest run --coverage",
|
72
|
-
"test:coverage:ui": "vitest
|
73
|
-
"dist": "pnpm clean && pnpm lint && pnpm test && pnpm build"
|
60
|
+
"test:watch": "vitest watch --config vitest.config.js",
|
61
|
+
"test:coverage": "vitest run --config vitest.config.js --coverage",
|
62
|
+
"test:coverage:ui": "vitest --config vitest.config.js --coverage --ui",
|
63
|
+
"dist": "pnpm clean:build && pnpm lint && pnpm test && pnpm build"
|
74
64
|
},
|
75
65
|
"dependencies": {
|
76
66
|
"eventsource-parser": "1.1.1",
|
@@ -90,7 +80,6 @@
|
|
90
80
|
"@vitest/ui": "1.1.0",
|
91
81
|
"eslint": "^8.45.0",
|
92
82
|
"eslint-config-prettier": "9.1.0",
|
93
|
-
"
|
94
|
-
"msw": "2.0.10"
|
83
|
+
"msw": "2.0.11"
|
95
84
|
}
|
96
85
|
}
|
@@ -0,0 +1,33 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.JsonTestServer = void 0;
|
4
|
+
const msw_1 = require("msw");
|
5
|
+
const node_1 = require("msw/node");
|
6
|
+
class JsonTestServer {
|
7
|
+
constructor(url) {
|
8
|
+
Object.defineProperty(this, "server", {
|
9
|
+
enumerable: true,
|
10
|
+
configurable: true,
|
11
|
+
writable: true,
|
12
|
+
value: void 0
|
13
|
+
});
|
14
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
15
|
+
Object.defineProperty(this, "responseBodyJson", {
|
16
|
+
enumerable: true,
|
17
|
+
configurable: true,
|
18
|
+
writable: true,
|
19
|
+
value: {}
|
20
|
+
});
|
21
|
+
const responseBodyJson = () => this.responseBodyJson;
|
22
|
+
this.server = (0, node_1.setupServer)(msw_1.http.post(url, () => msw_1.HttpResponse.json(responseBodyJson())));
|
23
|
+
}
|
24
|
+
setupTestEnvironment() {
|
25
|
+
beforeAll(() => this.server.listen());
|
26
|
+
beforeEach(() => {
|
27
|
+
this.responseBodyJson = {};
|
28
|
+
});
|
29
|
+
afterEach(() => this.server.resetHandlers());
|
30
|
+
afterAll(() => this.server.close());
|
31
|
+
}
|
32
|
+
}
|
33
|
+
exports.JsonTestServer = JsonTestServer;
|
@@ -0,0 +1,29 @@
|
|
1
|
+
import { HttpResponse, http } from "msw";
|
2
|
+
import { setupServer } from "msw/node";
|
3
|
+
export class JsonTestServer {
|
4
|
+
constructor(url) {
|
5
|
+
Object.defineProperty(this, "server", {
|
6
|
+
enumerable: true,
|
7
|
+
configurable: true,
|
8
|
+
writable: true,
|
9
|
+
value: void 0
|
10
|
+
});
|
11
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
12
|
+
Object.defineProperty(this, "responseBodyJson", {
|
13
|
+
enumerable: true,
|
14
|
+
configurable: true,
|
15
|
+
writable: true,
|
16
|
+
value: {}
|
17
|
+
});
|
18
|
+
const responseBodyJson = () => this.responseBodyJson;
|
19
|
+
this.server = setupServer(http.post(url, () => HttpResponse.json(responseBodyJson())));
|
20
|
+
}
|
21
|
+
setupTestEnvironment() {
|
22
|
+
beforeAll(() => this.server.listen());
|
23
|
+
beforeEach(() => {
|
24
|
+
this.responseBodyJson = {};
|
25
|
+
});
|
26
|
+
afterEach(() => this.server.resetHandlers());
|
27
|
+
afterAll(() => this.server.close());
|
28
|
+
}
|
29
|
+
}
|
@@ -0,0 +1,55 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.StreamingTestServer = void 0;
|
4
|
+
const msw_1 = require("msw");
|
5
|
+
const node_1 = require("msw/node");
|
6
|
+
class StreamingTestServer {
|
7
|
+
constructor(url) {
|
8
|
+
Object.defineProperty(this, "server", {
|
9
|
+
enumerable: true,
|
10
|
+
configurable: true,
|
11
|
+
writable: true,
|
12
|
+
value: void 0
|
13
|
+
});
|
14
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
15
|
+
Object.defineProperty(this, "responseChunks", {
|
16
|
+
enumerable: true,
|
17
|
+
configurable: true,
|
18
|
+
writable: true,
|
19
|
+
value: []
|
20
|
+
});
|
21
|
+
const responseChunks = () => this.responseChunks;
|
22
|
+
this.server = (0, node_1.setupServer)(msw_1.http.post(url, () => {
|
23
|
+
const encoder = new TextEncoder();
|
24
|
+
const stream = new ReadableStream({
|
25
|
+
async start(controller) {
|
26
|
+
try {
|
27
|
+
for (const chunk of responseChunks()) {
|
28
|
+
controller.enqueue(encoder.encode(chunk));
|
29
|
+
}
|
30
|
+
}
|
31
|
+
finally {
|
32
|
+
controller.close();
|
33
|
+
}
|
34
|
+
},
|
35
|
+
});
|
36
|
+
return new msw_1.HttpResponse(stream, {
|
37
|
+
status: 200,
|
38
|
+
headers: {
|
39
|
+
"Content-Type": "text/event-stream",
|
40
|
+
"Cache-Control": "no-cache",
|
41
|
+
Connection: "keep-alive",
|
42
|
+
},
|
43
|
+
});
|
44
|
+
}));
|
45
|
+
}
|
46
|
+
setupTestEnvironment() {
|
47
|
+
beforeAll(() => this.server.listen());
|
48
|
+
beforeEach(() => {
|
49
|
+
this.responseChunks = [];
|
50
|
+
});
|
51
|
+
afterEach(() => this.server.resetHandlers());
|
52
|
+
afterAll(() => this.server.close());
|
53
|
+
}
|
54
|
+
}
|
55
|
+
exports.StreamingTestServer = StreamingTestServer;
|
@@ -0,0 +1,51 @@
|
|
1
|
+
import { HttpResponse, http } from "msw";
|
2
|
+
import { setupServer } from "msw/node";
|
3
|
+
export class StreamingTestServer {
|
4
|
+
constructor(url) {
|
5
|
+
Object.defineProperty(this, "server", {
|
6
|
+
enumerable: true,
|
7
|
+
configurable: true,
|
8
|
+
writable: true,
|
9
|
+
value: void 0
|
10
|
+
});
|
11
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
12
|
+
Object.defineProperty(this, "responseChunks", {
|
13
|
+
enumerable: true,
|
14
|
+
configurable: true,
|
15
|
+
writable: true,
|
16
|
+
value: []
|
17
|
+
});
|
18
|
+
const responseChunks = () => this.responseChunks;
|
19
|
+
this.server = setupServer(http.post(url, () => {
|
20
|
+
const encoder = new TextEncoder();
|
21
|
+
const stream = new ReadableStream({
|
22
|
+
async start(controller) {
|
23
|
+
try {
|
24
|
+
for (const chunk of responseChunks()) {
|
25
|
+
controller.enqueue(encoder.encode(chunk));
|
26
|
+
}
|
27
|
+
}
|
28
|
+
finally {
|
29
|
+
controller.close();
|
30
|
+
}
|
31
|
+
},
|
32
|
+
});
|
33
|
+
return new HttpResponse(stream, {
|
34
|
+
status: 200,
|
35
|
+
headers: {
|
36
|
+
"Content-Type": "text/event-stream",
|
37
|
+
"Cache-Control": "no-cache",
|
38
|
+
Connection: "keep-alive",
|
39
|
+
},
|
40
|
+
});
|
41
|
+
}));
|
42
|
+
}
|
43
|
+
setupTestEnvironment() {
|
44
|
+
beforeAll(() => this.server.listen());
|
45
|
+
beforeEach(() => {
|
46
|
+
this.responseChunks = [];
|
47
|
+
});
|
48
|
+
afterEach(() => this.server.resetHandlers());
|
49
|
+
afterAll(() => this.server.close());
|
50
|
+
}
|
51
|
+
}
|
@@ -0,0 +1,13 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.arrayFromAsync = void 0;
|
4
|
+
// TODO once Array.fromAsync is in Node.js,
|
5
|
+
// use Array.fromAsync instead of this function
|
6
|
+
async function arrayFromAsync(iterable) {
|
7
|
+
const result = [];
|
8
|
+
for await (const item of iterable) {
|
9
|
+
result.push(item);
|
10
|
+
}
|
11
|
+
return result;
|
12
|
+
}
|
13
|
+
exports.arrayFromAsync = arrayFromAsync;
|
@@ -0,0 +1 @@
|
|
1
|
+
export declare function arrayFromAsync<T>(iterable: AsyncIterable<T>): Promise<T[]>;
|
@@ -4,7 +4,7 @@ import { ToolDefinition } from "../ToolDefinition.js";
|
|
4
4
|
import { ToolCallGenerationModel } from "./ToolCallGenerationModel.js";
|
5
5
|
export interface ToolCallPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT> {
|
6
6
|
createPrompt: (prompt: SOURCE_PROMPT, tool: ToolDefinition<string, unknown>) => TARGET_PROMPT;
|
7
|
-
extractToolCall: (response: string) => {
|
7
|
+
extractToolCall: (response: string, tool: ToolDefinition<string, unknown>) => {
|
8
8
|
id: string;
|
9
9
|
args: unknown;
|
10
10
|
} | null;
|