modelfusion 0.104.0 → 0.106.0
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +60 -0
- package/README.md +8 -10
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
- package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +6 -3
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
- package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +49 -15
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +171 -20
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +47 -14
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
- package/model-provider/mistral/MistralPromptTemplate.js +9 -2
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +67 -14
- package/model-provider/ollama/OllamaChatModel.js +8 -44
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +67 -10
- package/model-provider/ollama/OllamaCompletionModel.js +24 -45
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +95 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +72 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +17 -17
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -7
- package/model-provider/openai/index.d.ts +5 -7
- package/model-provider/openai/index.js +5 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +13 -24
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js +1 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → llamacpp/LlamaCppTextGenerationModel.test.d.ts} +0 -0
- /package/{server/fastify/AssetStorage.js → model-provider/mistral/MistralChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Flow.js → model-provider/ollama/OllamaChatModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/{server/fastify/FlowSchema.js → model-provider/openai/OpenAIChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Logger.js → model-provider/openai/OpenAICompletionModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
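The most consequential change in this range is the switch from per-provider stream iterables (e.g. the removed OpenAIChatStreamIterable files) to shared util/streaming helpers (createEventSourceResponseHandler, createJsonStreamResponseHandler, parseEventSourceStreamAsAsyncIterable, parseJsonStreamAsAsyncIterable) plus the new test servers. As orientation for the per-file diffs below, here is a minimal standalone sketch, not modelfusion code, of the pattern those helpers implement for OpenAI-style chat streams: validate each server-sent event against a Zod schema and pull the text delta out of the first choice. The function name and the pared-down schema are illustrative assumptions.

```ts
// Illustrative sketch only (not the package's internal code): validate one SSE
// payload against a Zod schema and extract the first choice's text delta,
// mirroring the chunk handling shown in the diffs below.
import { z } from "zod";

const chunkSchema = z.object({
  object: z.literal("chat.completion.chunk"),
  choices: z.array(
    z.object({
      index: z.number(),
      delta: z.object({ content: z.string().nullable().optional() }),
    })
  ),
});

// `data` is the JSON payload of a single `data: ...` SSE line (assumed already split out).
function extractTextDeltaFromEvent(data: string): string | undefined {
  if (data === "[DONE]") return undefined; // OpenAI terminates the stream with this marker
  const parsed = chunkSchema.safeParse(JSON.parse(data));
  if (!parsed.success) return undefined; // not a completion chunk
  const firstChoice = parsed.data.choices[0];
  if (firstChoice == null || firstChoice.index > 0) return undefined;
  return firstChoice.delta.content ?? undefined;
}
```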
package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs}
RENAMED
@@ -2,14 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAIChatResponseFormat = exports.AbstractOpenAIChatModel = void 0;
 const zod_1 = require("zod");
-const callWithRetryAndThrottle_js_1 = require("
-const postToApi_js_1 = require("
-const
-const
-const
-const
-const
-const
+const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
+const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
+const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
+const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
+const createEventSourceResponseHandler_js_1 = require("../../util/streaming/createEventSourceResponseHandler.cjs");
+const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
+const OpenAIError_js_1 = require("./OpenAIError.cjs");
 /**
  * Abstract text generation model that calls an API that is compatible with the OpenAI chat API.
  *
@@ -105,9 +105,21 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
     doStreamText(prompt, options) {
         return this.callAPI(prompt, {
             ...options,
-            responseFormat: exports.OpenAIChatResponseFormat.
+            responseFormat: exports.OpenAIChatResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        if (chunk.object !== "chat.completion.chunk") {
+            return undefined;
+        }
+        const chatChunk = chunk;
+        const firstChoice = chatChunk.choices[0];
+        if (firstChoice.index > 0) {
+            return undefined;
+        }
+        return firstChoice.delta.content ?? undefined;
+    }
     async doGenerateToolCall(tool, prompt, options) {
         const response = await this.callAPI(prompt, {
             ...options,
@@ -220,6 +232,54 @@ const openAIChatResponseSchema = zod_1.z.object({
         total_tokens: zod_1.z.number(),
     }),
 });
+const chatCompletionChunkSchema = zod_1.z.object({
+    object: zod_1.z.literal("chat.completion.chunk"),
+    id: zod_1.z.string(),
+    choices: zod_1.z.array(zod_1.z.object({
+        delta: zod_1.z.object({
+            role: zod_1.z.enum(["assistant", "user"]).optional(),
+            content: zod_1.z.string().nullable().optional(),
+            function_call: zod_1.z
+                .object({
+                    name: zod_1.z.string().optional(),
+                    arguments: zod_1.z.string().optional(),
+                })
+                .optional(),
+            tool_calls: zod_1.z
+                .array(zod_1.z.object({
+                    id: zod_1.z.string(),
+                    type: zod_1.z.literal("function"),
+                    function: zod_1.z.object({
+                        name: zod_1.z.string(),
+                        arguments: zod_1.z.string(),
+                    }),
+                }))
+                .optional(),
+        }),
+        finish_reason: zod_1.z
+            .enum([
+                "stop",
+                "length",
+                "tool_calls",
+                "content_filter",
+                "function_call",
+            ])
+            .nullable()
+            .optional(),
+        index: zod_1.z.number(),
+    })),
+    created: zod_1.z.number(),
+    model: zod_1.z.string(),
+    system_fingerprint: zod_1.z.string().optional().nullable(),
+});
+const openaiChatChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.union([
+    chatCompletionChunkSchema,
+    zod_1.z.object({
+        object: zod_1.z.string().refine((obj) => obj !== "chat.completion.chunk", {
+            message: "Object must be 'chat.completion.chunk'",
+        }),
+    }),
+]));
 exports.OpenAIChatResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -231,12 +291,8 @@ exports.OpenAIChatResponseFormat = {
     /**
      * Returns an async iterable over the text deltas (only the tex different of the first choice).
      */
-
-        stream: true,
-        handler: async ({ response }) => (0, OpenAIChatStreamIterable_js_1.createOpenAIChatDeltaIterableQueue)(response.body, (delta) => delta[0]?.delta?.content ?? ""),
-    },
-    structureDeltaIterable: {
+    deltaIterable: {
         stream: true,
-        handler:
+        handler: (0, createEventSourceResponseHandler_js_1.createEventSourceResponseHandler)(openaiChatChunkSchema),
     },
 };
package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts}
RENAMED
@@ -1,12 +1,11 @@
 import { z } from "zod";
-import { FunctionOptions } from "
-import { ApiConfiguration } from "
-import { ResponseHandler } from "
-import { AbstractModel } from "
-import {
-import {
-import {
-import { ToolDefinition } from "../../../tool/ToolDefinition.js";
+import { FunctionOptions } from "../../core/FunctionOptions.js";
+import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+import { ResponseHandler } from "../../core/api/postToApi.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
+import { TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
+import { TextGenerationFinishReason } from "../../model-function/generate-text/TextGenerationResult.js";
+import { ToolDefinition } from "../../tool/ToolDefinition.js";
 import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 export interface AbstractOpenAIChatCallSettings {
     api?: ApiConfiguration;
@@ -138,7 +137,36 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
         };
     }>;
     private translateFinishReason;
-    doStreamText(prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<
+    doStreamText(prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+        object: "chat.completion.chunk";
+        model: string;
+        id: string;
+        created: number;
+        choices: {
+            delta: {
+                role?: "user" | "assistant" | undefined;
+                content?: string | null | undefined;
+                function_call?: {
+                    name?: string | undefined;
+                    arguments?: string | undefined;
+                } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
+            };
+            index: number;
+            finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+        }[];
+        system_fingerprint?: string | null | undefined;
+    } | {
+        object: string;
+    }>>>;
+    extractTextDelta(delta: unknown): string | undefined;
     doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<{
         response: {
             object: "chat.completion";
@@ -437,6 +465,210 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     system_fingerprint?: string | null | undefined;
 }>;
 export type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;
+declare const chatCompletionChunkSchema: z.ZodObject<{
+    object: z.ZodLiteral<"chat.completion.chunk">;
+    id: z.ZodString;
+    choices: z.ZodArray<z.ZodObject<{
+        delta: z.ZodObject<{
+            role: z.ZodOptional<z.ZodEnum<["assistant", "user"]>>;
+            content: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+            function_call: z.ZodOptional<z.ZodObject<{
+                name: z.ZodOptional<z.ZodString>;
+                arguments: z.ZodOptional<z.ZodString>;
+            }, "strip", z.ZodTypeAny, {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            }, {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            }>>;
+            tool_calls: z.ZodOptional<z.ZodArray<z.ZodObject<{
+                id: z.ZodString;
+                type: z.ZodLiteral<"function">;
+                function: z.ZodObject<{
+                    name: z.ZodString;
+                    arguments: z.ZodString;
+                }, "strip", z.ZodTypeAny, {
+                    name: string;
+                    arguments: string;
+                }, {
+                    name: string;
+                    arguments: string;
+                }>;
+            }, "strip", z.ZodTypeAny, {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }, {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }>, "many">>;
+        }, "strip", z.ZodTypeAny, {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        }, {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        }>;
+        finish_reason: z.ZodOptional<z.ZodNullable<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
+        index: z.ZodNumber;
+    }, "strip", z.ZodTypeAny, {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }, {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }>, "many">;
+    created: z.ZodNumber;
+    model: z.ZodString;
+    system_fingerprint: z.ZodNullable<z.ZodOptional<z.ZodString>>;
+}, "strip", z.ZodTypeAny, {
+    object: "chat.completion.chunk";
+    model: string;
+    id: string;
+    created: number;
+    choices: {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }[];
+    system_fingerprint?: string | null | undefined;
+}, {
+    object: "chat.completion.chunk";
+    model: string;
+    id: string;
+    created: number;
+    choices: {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }[];
+    system_fingerprint?: string | null | undefined;
+}>;
+export type OpenAIChatCompletionChunk = z.infer<typeof chatCompletionChunkSchema>;
+declare const openaiChatChunkSchema: import("../../core/schema/ZodSchema.js").ZodSchema<{
+    object: "chat.completion.chunk";
+    model: string;
+    id: string;
+    created: number;
+    choices: {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }[];
+    system_fingerprint?: string | null | undefined;
+} | {
+    object: string;
+}>;
+export type OpenAIChatChunk = (typeof openaiChatChunkSchema)["_type"];
 export type OpenAIChatResponseFormatType<T> = {
     stream: boolean;
     handler: ResponseHandler<T>;
@@ -446,7 +678,7 @@ export declare const OpenAIChatResponseFormat: {
      * Returns the response as a JSON object.
      */
     json: {
-        stream:
+        stream: boolean;
         handler: ResponseHandler<{
             object: "chat.completion";
             usage: {
@@ -484,17 +716,39 @@ export declare const OpenAIChatResponseFormat: {
     /**
      * Returns an async iterable over the text deltas (only the tex different of the first choice).
      */
-
-        stream:
+    deltaIterable: {
+        stream: boolean;
         handler: ({ response }: {
             response: Response;
-        }) => Promise<AsyncIterable<Delta<
-
-
-
-
-
-
+        }) => Promise<AsyncIterable<import("../../index.js").Delta<{
+            object: "chat.completion.chunk";
+            model: string;
+            id: string;
+            created: number;
+            choices: {
+                delta: {
+                    role?: "user" | "assistant" | undefined;
+                    content?: string | null | undefined;
+                    function_call?: {
+                        name?: string | undefined;
+                        arguments?: string | undefined;
+                    } | undefined;
+                    tool_calls?: {
+                        function: {
+                            name: string;
+                            arguments: string;
+                        };
+                        type: "function";
+                        id: string;
+                    }[] | undefined;
+                };
+                index: number;
+                finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+            }[];
+            system_fingerprint?: string | null | undefined;
+        } | {
+            object: string;
+        }>>>;
     };
 };
 export {};
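The declaration file now also exports OpenAIChatCompletionChunk (inferred from chatCompletionChunkSchema) and OpenAIChatChunk, a union that additionally admits a bare { object: string } for events that are not completion chunks. A small illustrative type guard (not part of the package; the local aliases stand in for the exported types) shows how that union is meant to be narrowed:

```ts
// Narrowing the chunk union: a discriminant check on `object` recovers the
// detailed chunk shape; everything else stays a bare { object: string }.
type DetailedChunk = { object: "chat.completion.chunk"; choices: { index: number }[] };
type OpenAIChatChunkLike = DetailedChunk | { object: string };

function isCompletionChunk(chunk: OpenAIChatChunkLike): chunk is DetailedChunk {
  return chunk.object === "chat.completion.chunk";
}
```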
package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js}
RENAMED
@@ -1,12 +1,12 @@
 import { z } from "zod";
-import { callWithRetryAndThrottle } from "
-import { createJsonResponseHandler, postJsonToApi, } from "
-import {
-import {
-import {
-import {
-import {
-import {
+import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
+import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
+import { parseJSON } from "../../core/schema/parseJSON.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
+import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
+import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
+import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
 /**
  * Abstract text generation model that calls an API that is compatible with the OpenAI chat API.
  *
@@ -102,9 +102,21 @@ export class AbstractOpenAIChatModel extends AbstractModel {
     doStreamText(prompt, options) {
         return this.callAPI(prompt, {
             ...options,
-            responseFormat: OpenAIChatResponseFormat.
+            responseFormat: OpenAIChatResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        if (chunk.object !== "chat.completion.chunk") {
+            return undefined;
+        }
+        const chatChunk = chunk;
+        const firstChoice = chatChunk.choices[0];
+        if (firstChoice.index > 0) {
+            return undefined;
+        }
+        return firstChoice.delta.content ?? undefined;
+    }
     async doGenerateToolCall(tool, prompt, options) {
         const response = await this.callAPI(prompt, {
             ...options,
@@ -216,6 +228,54 @@ const openAIChatResponseSchema = z.object({
         total_tokens: z.number(),
     }),
 });
+const chatCompletionChunkSchema = z.object({
+    object: z.literal("chat.completion.chunk"),
+    id: z.string(),
+    choices: z.array(z.object({
+        delta: z.object({
+            role: z.enum(["assistant", "user"]).optional(),
+            content: z.string().nullable().optional(),
+            function_call: z
+                .object({
+                    name: z.string().optional(),
+                    arguments: z.string().optional(),
+                })
+                .optional(),
+            tool_calls: z
+                .array(z.object({
+                    id: z.string(),
+                    type: z.literal("function"),
+                    function: z.object({
+                        name: z.string(),
+                        arguments: z.string(),
+                    }),
+                }))
+                .optional(),
+        }),
+        finish_reason: z
+            .enum([
+                "stop",
+                "length",
+                "tool_calls",
+                "content_filter",
+                "function_call",
+            ])
+            .nullable()
+            .optional(),
+        index: z.number(),
+    })),
+    created: z.number(),
+    model: z.string(),
+    system_fingerprint: z.string().optional().nullable(),
+});
+const openaiChatChunkSchema = zodSchema(z.union([
+    chatCompletionChunkSchema,
+    z.object({
+        object: z.string().refine((obj) => obj !== "chat.completion.chunk", {
+            message: "Object must be 'chat.completion.chunk'",
+        }),
+    }),
+]));
 export const OpenAIChatResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -227,12 +287,8 @@ export const OpenAIChatResponseFormat = {
     /**
      * Returns an async iterable over the text deltas (only the tex different of the first choice).
      */
-
-        stream: true,
-        handler: async ({ response }) => createOpenAIChatDeltaIterableQueue(response.body, (delta) => delta[0]?.delta?.content ?? ""),
-    },
-    structureDeltaIterable: {
+    deltaIterable: {
         stream: true,
-        handler:
+        handler: createEventSourceResponseHandler(openaiChatChunkSchema),
     },
 };
package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs}
RENAMED
@@ -5,7 +5,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAIChatFunctionCallStructureGenerationModel = void 0;
 const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
-const StructureParseError_js_1 = require("
+const StructureParseError_js_1 = require("../../model-function/generate-structure/StructureParseError.cjs");
+const parsePartialJson_js_1 = require("../../model-function/generate-structure/parsePartialJson.cjs");
 const AbstractOpenAIChatModel_js_1 = require("./AbstractOpenAIChatModel.cjs");
 const OpenAIChatPromptTemplate_js_1 = require("./OpenAIChatPromptTemplate.cjs");
 class OpenAIChatFunctionCallStructureGenerationModel {
@@ -132,7 +133,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
         const expandedPrompt = this.promptTemplate.format(prompt);
         return this.model.callAPI(expandedPrompt, {
             ...options,
-            responseFormat: AbstractOpenAIChatModel_js_1.OpenAIChatResponseFormat.
+            responseFormat: AbstractOpenAIChatModel_js_1.OpenAIChatResponseFormat.deltaIterable,
             functionCall: { name: this.fnName },
             functions: [
                 {
@@ -143,5 +144,20 @@ class OpenAIChatFunctionCallStructureGenerationModel {
             ],
         });
     }
+    extractStructureTextDelta(delta) {
+        const chunk = delta;
+        if (chunk.object !== "chat.completion.chunk") {
+            return undefined;
+        }
+        const chatChunk = chunk;
+        const firstChoice = chatChunk.choices[0];
+        if (firstChoice.index > 0) {
+            return undefined;
+        }
+        return firstChoice.delta.function_call?.arguments;
+    }
+    parseAccumulatedStructureText(accumulatedText) {
+        return (0, parsePartialJson_js_1.parsePartialJson)(accumulatedText);
+    }
 }
 exports.OpenAIChatFunctionCallStructureGenerationModel = OpenAIChatFunctionCallStructureGenerationModel;
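The structure-streaming path now mirrors the text path: extractStructureTextDelta collects the function_call.arguments fragments of the first choice, and parseAccumulatedStructureText re-parses the accumulated string with parsePartialJson after every chunk so partial objects can be surfaced early. A rough sketch of that accumulate-and-reparse loop; parseBestEffort is a deliberately simplified stand-in for parsePartialJson (it only yields once the JSON happens to be complete), and streamPartialStructures is illustrative, not library API:

```ts
// Re-parse the growing arguments string after every fragment and yield whatever
// parses. parseBestEffort is a simplified stand-in for the package's parsePartialJson.
function parseBestEffort(text: string): unknown | undefined {
  try {
    return JSON.parse(text);
  } catch {
    return undefined; // still an incomplete JSON prefix
  }
}

async function* streamPartialStructures(
  argumentFragments: AsyncIterable<string>
): AsyncGenerator<unknown> {
  let accumulated = "";
  for await (const fragment of argumentFragments) {
    accumulated += fragment;
    const parsed = parseBestEffort(accumulated);
    if (parsed !== undefined) yield parsed;
  }
}
```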
package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts}
RENAMED
@@ -1,11 +1,11 @@
-import { FunctionOptions } from "
-import { JsonSchemaProducer } from "
-import { Schema } from "
-import {
-import { TextGenerationPromptTemplate } from "
+import { FunctionOptions } from "../../core/FunctionOptions.js";
+import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
+import { Schema } from "../../core/schema/Schema.js";
+import { StructureStreamingModel } from "../../model-function/generate-structure/StructureGenerationModel.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { OpenAIChatPrompt } from "./AbstractOpenAIChatModel.js";
-import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel";
-export declare class OpenAIChatFunctionCallStructureGenerationModel<PROMPT_TEMPLATE extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>> implements
+import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel.js";
+export declare class OpenAIChatFunctionCallStructureGenerationModel<PROMPT_TEMPLATE extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>> implements StructureStreamingModel<Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
 OpenAIChatSettings> {
     readonly model: OpenAIChatModel;
     readonly fnName: string;
@@ -17,7 +17,7 @@ OpenAIChatSettings> {
     fnDescription?: string;
     promptTemplate: PROMPT_TEMPLATE;
     });
-    get modelInformation(): import("
+    get modelInformation(): import("../../index.js").ModelInformation;
     get settings(): OpenAIChatSettings;
     get settingsForEvent(): Partial<OpenAIChatSettings>;
     /**
@@ -27,11 +27,11 @@ OpenAIChatSettings> {
     /**
      * Returns this model with an instruction prompt template.
      */
-    withInstructionPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("
+    withInstructionPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("../../index.js").InstructionPrompt, OpenAIChatPrompt>>;
     /**
      * Returns this model with a chat prompt template.
      */
-    withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("
+    withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("../../index.js").ChatPrompt, OpenAIChatPrompt>>;
     withPromptTemplate<TARGET_PROMPT_FORMAT extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>>(promptTemplate: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallStructureGenerationModel<TARGET_PROMPT_FORMAT>;
     withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
     /**
@@ -85,5 +85,35 @@ OpenAIChatSettings> {
         };
     }>;
     doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
-    options?: FunctionOptions): Promise<AsyncIterable<import("
+    options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+        object: "chat.completion.chunk";
+        model: string;
+        id: string;
+        created: number;
+        choices: {
+            delta: {
+                role?: "user" | "assistant" | undefined;
+                content?: string | null | undefined;
+                function_call?: {
+                    name?: string | undefined;
+                    arguments?: string | undefined;
+                } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
+            };
+            index: number;
+            finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+        }[];
+        system_fingerprint?: string | null | undefined;
+    } | {
+        object: string;
+    }>>>;
+    extractStructureTextDelta(delta: unknown): string | undefined;
+    parseAccumulatedStructureText(accumulatedText: string): unknown;
 }