modelfusion 0.104.0 → 0.106.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +8 -10
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
- package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +6 -3
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
- package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +49 -15
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +171 -20
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +47 -14
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
- package/model-provider/mistral/MistralPromptTemplate.js +9 -2
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +67 -14
- package/model-provider/ollama/OllamaChatModel.js +8 -44
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +67 -10
- package/model-provider/ollama/OllamaCompletionModel.js +24 -45
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +95 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +72 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +17 -17
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -7
- package/model-provider/openai/index.d.ts +5 -7
- package/model-provider/openai/index.js +5 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +13 -24
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js +1 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → llamacpp/LlamaCppTextGenerationModel.test.d.ts} +0 -0
- /package/{server/fastify/AssetStorage.js → model-provider/mistral/MistralChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Flow.js → model-provider/ollama/OllamaChatModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/{server/fastify/FlowSchema.js → model-provider/openai/OpenAIChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Logger.js → model-provider/openai/OpenAICompletionModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
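
The hunks below all revolve around one streaming-API change: providers no longer accumulate a `fullDelta` object; they push the raw parsed chunk as `deltaValue`, and each model maps chunks to text through a new `extractTextDelta(delta: unknown)` method. A minimal sketch of the contract this implies (the exact `Delta<T>` declaration in `Delta.d.ts` is not shown in this diff, so the shape below is an assumption inferred from the `{ type: "delta", deltaValue: ... }` pushes):

    // Assumed shape of the simplified streaming delta (not shown in the hunks):
    type Delta<T> = {
        type: "delta";
        deltaValue: T; // raw provider chunk, e.g. a parsed Anthropic SSE event
    };

    // Each streaming model now converts its raw chunk to text in one place:
    interface TextStreamingDeltaSource<CHUNK> {
        doStreamText(prompt: string): Promise<AsyncIterable<Delta<CHUNK>>>;
        extractTextDelta(delta: unknown): string; // e.g. `chunk.completion` for Anthropic
    }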
package/model-provider/anthropic/AnthropicTextGenerationModel.cjs

@@ -85,7 +85,7 @@ class AnthropicTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             model: this.settings.model,
             prompt,
             stream: responseFormat.stream,
-            max_tokens_to_sample: this.settings.maxGenerationTokens,
+            max_tokens_to_sample: this.settings.maxGenerationTokens ?? 100,
             temperature: this.settings.temperature,
             top_k: this.settings.topK,
             top_p: this.settings.topP,
@@ -140,6 +140,10 @@ class AnthropicTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.AnthropicTextGenerationResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.completion;
+    }
     /**
      * Returns this model with a text prompt template.
      */
@@ -179,14 +183,13 @@ const anthropicTextGenerationResponseSchema = zod_1.z.object({
     stop_reason: zod_1.z.string(),
     model: zod_1.z.string(),
 });
-const
+const anthropicTextStreamChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
     completion: zod_1.z.string(),
     stop_reason: zod_1.z.string().nullable(),
     model: zod_1.z.string(),
 }));
 async function createAnthropicFullDeltaIterableQueue(stream) {
     const queue = new AsyncQueue_js_1.AsyncQueue();
-    let content = "";
     // process the stream asynchonously (no 'await' on purpose):
     (0, parseEventSourceStream_js_1.parseEventSourceStream)({ stream })
         .then(async (events) => {
@@ -203,18 +206,9 @@ async function createAnthropicFullDeltaIterableQueue(stream) {
             const data = event.data;
             const eventData = (0, parseJSON_js_1.parseJSON)({
                 text: data,
-                schema:
-            });
-            content += eventData.completion;
-            queue.push({
-                type: "delta",
-                fullDelta: {
-                    content,
-                    isComplete: eventData.stop_reason != null,
-                    delta: eventData.completion,
-                },
-                valueDelta: eventData.completion,
+                schema: anthropicTextStreamChunkSchema,
             });
+            queue.push({ type: "delta", deltaValue: eventData });
             if (eventData.stop_reason != null) {
                 queue.close();
             }
package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts

@@ -62,7 +62,12 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
         }[];
     }>;
     private translateFinishReason;
-    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<
+    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<{
+        model: string;
+        completion: string;
+        stop_reason: string | null;
+    }>>>;
+    extractTextDelta(delta: unknown): string;
     /**
      * Returns this model with a text prompt template.
      */
@@ -116,7 +121,11 @@ export declare const AnthropicTextGenerationResponseFormat: {
         stream: true;
         handler: ({ response }: {
             response: Response;
-        }) => Promise<AsyncIterable<Delta<
+        }) => Promise<AsyncIterable<Delta<{
+            model: string;
+            completion: string;
+            stop_reason: string | null;
+        }>>>;
     };
 };
 export {};
package/model-provider/anthropic/AnthropicTextGenerationModel.js

@@ -82,7 +82,7 @@ export class AnthropicTextGenerationModel extends AbstractModel {
             model: this.settings.model,
             prompt,
             stream: responseFormat.stream,
-            max_tokens_to_sample: this.settings.maxGenerationTokens,
+            max_tokens_to_sample: this.settings.maxGenerationTokens ?? 100,
             temperature: this.settings.temperature,
             top_k: this.settings.topK,
             top_p: this.settings.topP,
@@ -137,6 +137,10 @@ export class AnthropicTextGenerationModel extends AbstractModel {
             responseFormat: AnthropicTextGenerationResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.completion;
+    }
     /**
      * Returns this model with a text prompt template.
      */
@@ -175,14 +179,13 @@ const anthropicTextGenerationResponseSchema = z.object({
     stop_reason: z.string(),
     model: z.string(),
 });
-const
+const anthropicTextStreamChunkSchema = new ZodSchema(z.object({
     completion: z.string(),
     stop_reason: z.string().nullable(),
     model: z.string(),
 }));
 async function createAnthropicFullDeltaIterableQueue(stream) {
     const queue = new AsyncQueue();
-    let content = "";
     // process the stream asynchonously (no 'await' on purpose):
     parseEventSourceStream({ stream })
         .then(async (events) => {
@@ -199,18 +202,9 @@ async function createAnthropicFullDeltaIterableQueue(stream) {
             const data = event.data;
             const eventData = parseJSON({
                 text: data,
-                schema:
-            });
-            content += eventData.completion;
-            queue.push({
-                type: "delta",
-                fullDelta: {
-                    content,
-                    isComplete: eventData.stop_reason != null,
-                    delta: eventData.completion,
-                },
-                valueDelta: eventData.completion,
+                schema: anthropicTextStreamChunkSchema,
             });
+            queue.push({ type: "delta", deltaValue: eventData });
             if (eventData.stop_reason != null) {
                 queue.close();
             }
package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs

@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
+const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
+const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
+const AnthropicApiConfiguration_js_1 = require("./AnthropicApiConfiguration.cjs");
+const AnthropicTextGenerationModel_js_1 = require("./AnthropicTextGenerationModel.cjs");
+describe("streamText", () => {
+    const server = new StreamingTestServer_js_1.StreamingTestServer("https://api.anthropic.com/v1/complete");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":" Hello","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":", ","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"world!","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: ping\ndata: {"type": "ping"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"","stop_reason":"stop_sequence","model":"claude-instant-1.2",` +
+                `"stop":"\\n\\nHuman:","log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+        ];
+        const stream = await (0, streamText_js_1.streamText)(new AnthropicTextGenerationModel_js_1.AnthropicTextGenerationModel({
+            api: new AnthropicApiConfiguration_js_1.AnthropicApiConfiguration({
+                apiKey: "test-key",
+            }),
+            model: "claude-instant-1",
+        }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
package/model-provider/anthropic/AnthropicTextGenerationModel.test.js

@@ -0,0 +1,42 @@
+import { streamText } from "../../model-function/generate-text/streamText.js";
+import { StreamingTestServer } from "../../test/StreamingTestServer.js";
+import { arrayFromAsync } from "../../test/arrayFromAsync.js";
+import { AnthropicApiConfiguration } from "./AnthropicApiConfiguration.js";
+import { AnthropicTextGenerationModel } from "./AnthropicTextGenerationModel.js";
+describe("streamText", () => {
+    const server = new StreamingTestServer("https://api.anthropic.com/v1/complete");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":" Hello","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":", ","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"world!","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: ping\ndata: {"type": "ping"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"","stop_reason":"stop_sequence","model":"claude-instant-1.2",` +
+                `"stop":"\\n\\nHuman:","log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+        ];
+        const stream = await streamText(new AnthropicTextGenerationModel({
+            api: new AnthropicApiConfiguration({
+                apiKey: "test-key",
+            }),
+            model: "claude-instant-1",
+        }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await arrayFromAsync(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
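
The tests above lean on the new `test/arrayFromAsync` helper from the file list. Judging by its name, its tiny size (+13/+9 lines), and how the tests consume it, it is almost certainly a collector for async iterables; a sketch under that assumption:

    // Probable shape of test/arrayFromAsync (assumption: inferred from the
    // file list and the call sites above, not copied from this diff).
    export async function arrayFromAsync<T>(iterable: AsyncIterable<T>): Promise<T[]> {
        const result: T[] = [];
        for await (const item of iterable) {
            result.push(item); // drain the stream into a plain array for assertions
        }
        return result;
    }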
package/model-provider/cohere/CohereTextEmbeddingModel.d.ts

@@ -74,8 +74,8 @@ export declare class CohereTextEmbeddingModel extends AbstractModel<CohereTextEm
     get settingsForEvent(): Partial<CohereTextEmbeddingModelSettings>;
     doEmbedValues(texts: string[], options?: FunctionOptions): Promise<{
         response: {
-            texts: string[];
             embeddings: number[][];
+            texts: string[];
             id: string;
             meta: {
                 api_version: {
@@ -109,8 +109,8 @@ declare const cohereTextEmbeddingResponseSchema: z.ZodObject<{
         };
     }>;
 }, "strip", z.ZodTypeAny, {
-    texts: string[];
     embeddings: number[][];
+    texts: string[];
     id: string;
     meta: {
         api_version: {
@@ -118,8 +118,8 @@ declare const cohereTextEmbeddingResponseSchema: z.ZodObject<{
     };
 }, {
-    texts: string[];
     embeddings: number[][];
+    texts: string[];
     id: string;
     meta: {
         api_version: {
package/model-provider/cohere/CohereTextGenerationModel.cjs

@@ -10,8 +10,7 @@ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/gene
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const TextPromptTemplate_js_1 = require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs");
 const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
-const
-const parseJsonStream_js_1 = require("../../util/streaming/parseJsonStream.cjs");
+const createJsonStreamResponseHandler_js_1 = require("../../util/streaming/createJsonStreamResponseHandler.cjs");
 const CohereApiConfiguration_js_1 = require("./CohereApiConfiguration.cjs");
 const CohereError_js_1 = require("./CohereError.cjs");
 const CohereTokenizer_js_1 = require("./CohereTokenizer.cjs");
@@ -163,8 +162,9 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.CohereTextGenerationResponseFormat.deltaIterable,
         });
     }
-    extractTextDelta(
-
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.is_finished === true ? "" : chunk.text;
     }
     /**
      * Returns this model with an instruction prompt template.
@@ -210,7 +210,7 @@ const cohereTextGenerationResponseSchema = zod_1.z.object({
     })
         .optional(),
 });
-const
+const cohereTextStreamChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.discriminatedUnion("is_finished", [
     zod_1.z.object({
         text: zod_1.z.string(),
         is_finished: zod_1.z.literal(false),
@@ -221,44 +221,6 @@ const cohereTextStreamingResponseSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.d
         response: cohereTextGenerationResponseSchema,
     }),
 ]));
-async function createCohereTextGenerationFullDeltaIterableQueue(stream) {
-    const queue = new AsyncQueue_js_1.AsyncQueue();
-    let accumulatedText = "";
-    // process the stream asynchonously (no 'await' on purpose):
-    (0, parseJsonStream_js_1.parseJsonStream)({
-        stream,
-        schema: cohereTextStreamingResponseSchema,
-        process(event) {
-            if (event.is_finished === true) {
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: true,
-                        delta: "",
-                    },
-                    valueDelta: "",
-                });
-            }
-            else {
-                accumulatedText += event.text;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: false,
-                        delta: event.text,
-                    },
-                    valueDelta: event.text,
-                });
-            }
-        },
-        onDone() {
-            queue.close();
-        },
-    });
-    return queue;
-}
 exports.CohereTextGenerationResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -273,6 +235,6 @@ exports.CohereTextGenerationResponseFormat = {
      */
     deltaIterable: {
         stream: true,
-        handler:
+        handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)(cohereTextStreamChunkSchema),
     },
 };
package/model-provider/cohere/CohereTextGenerationModel.d.ts

@@ -3,7 +3,6 @@ import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { Delta } from "../../model-function/Delta.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
@@ -67,8 +66,8 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
     get settingsForEvent(): Partial<CohereTextGenerationModelSettings>;
     doGenerateTexts(prompt: string, options?: FunctionOptions): Promise<{
         response: {
-            prompt: string;
             id: string;
+            prompt: string;
             generations: {
                 text: string;
                 id: string;
@@ -86,8 +85,28 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
         }[];
     }>;
     private translateFinishReason;
-    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<
-
+    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+        text: string;
+        is_finished: false;
+    } | {
+        response: {
+            id: string;
+            prompt: string;
+            generations: {
+                text: string;
+                id: string;
+                finish_reason?: string | undefined;
+            }[];
+            meta?: {
+                api_version: {
+                    version: string;
+                };
+            } | undefined;
+        };
+        finish_reason: string;
+        is_finished: true;
+    }>>>;
+    extractTextDelta(delta: unknown): string;
     /**
      * Returns this model with an instruction prompt template.
      */
@@ -136,8 +155,8 @@ declare const cohereTextGenerationResponseSchema: z.ZodObject<{
         };
     }>>;
 }, "strip", z.ZodTypeAny, {
-    prompt: string;
     id: string;
+    prompt: string;
     generations: {
         text: string;
         id: string;
@@ -149,8 +168,8 @@ declare const cohereTextGenerationResponseSchema: z.ZodObject<{
         };
     } | undefined;
 }, {
-    prompt: string;
     id: string;
+    prompt: string;
     generations: {
         text: string;
         id: string;
@@ -163,11 +182,6 @@ declare const cohereTextGenerationResponseSchema: z.ZodObject<{
     } | undefined;
 }>;
 export type CohereTextGenerationResponse = z.infer<typeof cohereTextGenerationResponseSchema>;
-export type CohereTextGenerationDelta = {
-    content: string;
-    isComplete: boolean;
-    delta: string;
-};
 export type CohereTextGenerationResponseFormatType<T> = {
     stream: boolean;
     handler: ResponseHandler<T>;
@@ -177,10 +191,10 @@ export declare const CohereTextGenerationResponseFormat: {
      * Returns the response as a JSON object.
      */
     json: {
-        stream:
+        stream: boolean;
         handler: ResponseHandler<{
-            prompt: string;
             id: string;
+            prompt: string;
             generations: {
                 text: string;
                 id: string;
@@ -198,10 +212,30 @@ export declare const CohereTextGenerationResponseFormat: {
      * of the response stream.
      */
     deltaIterable: {
-        stream:
+        stream: boolean;
         handler: ({ response }: {
            response: Response;
-        }) => Promise<AsyncIterable<Delta<
+        }) => Promise<AsyncIterable<import("../../index.js").Delta<{
+            text: string;
+            is_finished: false;
+        } | {
+            response: {
+                id: string;
+                prompt: string;
+                generations: {
+                    text: string;
+                    id: string;
+                    finish_reason?: string | undefined;
+                }[];
+                meta?: {
+                    api_version: {
+                        version: string;
+                    };
+                } | undefined;
+            };
+            finish_reason: string;
+            is_finished: true;
+        }>>>;
     };
 };
 export {};
package/model-provider/cohere/CohereTextGenerationModel.js

@@ -1,14 +1,13 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
-import {
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { chat, instruction, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
-import {
-import { parseJsonStream } from "../../util/streaming/parseJsonStream.js";
+import { createJsonStreamResponseHandler } from "../../util/streaming/createJsonStreamResponseHandler.js";
 import { CohereApiConfiguration } from "./CohereApiConfiguration.js";
 import { failedCohereCallResponseHandler } from "./CohereError.js";
 import { CohereTokenizer } from "./CohereTokenizer.js";
@@ -160,8 +159,9 @@ export class CohereTextGenerationModel extends AbstractModel {
             responseFormat: CohereTextGenerationResponseFormat.deltaIterable,
         });
     }
-    extractTextDelta(
-
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.is_finished === true ? "" : chunk.text;
     }
     /**
      * Returns this model with an instruction prompt template.
@@ -206,7 +206,7 @@ const cohereTextGenerationResponseSchema = z.object({
     })
         .optional(),
 });
-const
+const cohereTextStreamChunkSchema = zodSchema(z.discriminatedUnion("is_finished", [
     z.object({
         text: z.string(),
         is_finished: z.literal(false),
@@ -217,44 +217,6 @@ const cohereTextStreamingResponseSchema = new ZodSchema(z.discriminatedUnion("is
         response: cohereTextGenerationResponseSchema,
     }),
 ]));
-async function createCohereTextGenerationFullDeltaIterableQueue(stream) {
-    const queue = new AsyncQueue();
-    let accumulatedText = "";
-    // process the stream asynchonously (no 'await' on purpose):
-    parseJsonStream({
-        stream,
-        schema: cohereTextStreamingResponseSchema,
-        process(event) {
-            if (event.is_finished === true) {
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: true,
-                        delta: "",
-                    },
-                    valueDelta: "",
-                });
-            }
-            else {
-                accumulatedText += event.text;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: false,
-                        delta: event.text,
-                    },
-                    valueDelta: event.text,
-                });
-            }
-        },
-        onDone() {
-            queue.close();
-        },
-    });
-    return queue;
-}
 export const CohereTextGenerationResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -269,6 +231,6 @@ export const CohereTextGenerationResponseFormat = {
      */
     deltaIterable: {
         stream: true,
-        handler:
+        handler: createJsonStreamResponseHandler(cohereTextStreamChunkSchema),
     },
 };
package/model-provider/cohere/CohereTextGenerationModel.test.cjs

@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
+const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
+const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
+const CohereApiConfiguration_js_1 = require("./CohereApiConfiguration.cjs");
+const CohereTextGenerationModel_js_1 = require("./CohereTextGenerationModel.cjs");
+describe("streamText", () => {
+    const server = new StreamingTestServer_js_1.StreamingTestServer("https://api.cohere.ai/v1/generate");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `{"text":"Hello","is_finished":false}\n`,
+            `{"text":", ","is_finished":false}\n`,
+            `{"text":"world!","is_finished":false}\n`,
+            `{"is_finished":true,"finish_reason":"COMPLETE",` +
+                `"response":{"id":"40141e4f-2202-4e09-9188-c6ece324b7ba",` +
+                `"generations":[{"id":"c598f9d2-4a6d-46d6-beed-47d55b996540",` +
+                `"text":"Hello, world!","finish_reason":"COMPLETE"}],` +
+                `"prompt":"hello"}}\n`,
+        ];
+        const stream = await (0, streamText_js_1.streamText)(new CohereTextGenerationModel_js_1.CohereTextGenerationModel({
+            api: new CohereApiConfiguration_js_1.CohereApiConfiguration({ apiKey: "test-key" }),
+            model: "command-light",
+        }), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
package/model-provider/cohere/CohereTextGenerationModel.test.js

@@ -0,0 +1,31 @@
+import { streamText } from "../../model-function/generate-text/streamText.js";
+import { StreamingTestServer } from "../../test/StreamingTestServer.js";
+import { arrayFromAsync } from "../../test/arrayFromAsync.js";
+import { CohereApiConfiguration } from "./CohereApiConfiguration.js";
+import { CohereTextGenerationModel } from "./CohereTextGenerationModel.js";
+describe("streamText", () => {
+    const server = new StreamingTestServer("https://api.cohere.ai/v1/generate");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `{"text":"Hello","is_finished":false}\n`,
+            `{"text":", ","is_finished":false}\n`,
+            `{"text":"world!","is_finished":false}\n`,
+            `{"is_finished":true,"finish_reason":"COMPLETE",` +
+                `"response":{"id":"40141e4f-2202-4e09-9188-c6ece324b7ba",` +
+                `"generations":[{"id":"c598f9d2-4a6d-46d6-beed-47d55b996540",` +
+                `"text":"Hello, world!","finish_reason":"COMPLETE"}],` +
+                `"prompt":"hello"}}\n`,
+        ];
+        const stream = await streamText(new CohereTextGenerationModel({
+            api: new CohereApiConfiguration({ apiKey: "test-key" }),
+            model: "command-light",
+        }), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await arrayFromAsync(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
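
Both providers now delegate stream handling to the new `createJsonStreamResponseHandler` utility, which the file list pairs with `parseJsonStreamAsAsyncIterable` under `util/streaming/`. The actual implementation is not included in this diff; given the call sites above and the small size of the new files, a plausible sketch is:

    // Plausible sketch only (assumption: reconstructed from the call site
    // `createJsonStreamResponseHandler(cohereTextStreamChunkSchema)` and the
    // new util/streaming file names; signatures may differ in the package).
    import { Schema } from "../../core/schema/Schema.js";
    import { parseJsonStreamAsAsyncIterable } from "./parseJsonStreamAsAsyncIterable.js";

    export const createJsonStreamResponseHandler =
        <T>(schema: Schema<T>) =>
        ({ response }: { response: Response }) =>
            // parse newline-delimited JSON chunks and validate each against the schema
            parseJsonStreamAsAsyncIterable({ stream: response.body!, schema });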
package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs

@@ -149,8 +149,7 @@ class ElevenLabsSpeechModel extends AbstractModel_js_1.AbstractModel {
         if (!response.isFinal) {
             queue.push({
                 type: "delta",
-
-                valueDelta: Buffer.from(response.audio, "base64"),
+                deltaValue: Buffer.from(response.audio, "base64"),
             });
         }
     };
package/model-provider/elevenlabs/ElevenLabsSpeechModel.js

@@ -146,8 +146,7 @@ export class ElevenLabsSpeechModel extends AbstractModel {
         if (!response.isFinal) {
             queue.push({
                 type: "delta",
-
-                valueDelta: Buffer.from(response.audio, "base64"),
+                deltaValue: Buffer.from(response.audio, "base64"),
             });
         }
     };
|