modelfusion 0.103.0 → 0.105.0

This diff shows the contents of publicly available package versions as released to one of the supported public registries, comparing the packages as they appear in those registries. It is provided for informational purposes only.
Files changed (181)
  1. package/CHANGELOG.md +56 -0
  2. package/model-function/Delta.d.ts +1 -2
  3. package/model-function/executeStreamCall.cjs +6 -4
  4. package/model-function/executeStreamCall.d.ts +2 -2
  5. package/model-function/executeStreamCall.js +6 -4
  6. package/model-function/generate-speech/streamSpeech.cjs +1 -2
  7. package/model-function/generate-speech/streamSpeech.js +1 -2
  8. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
  9. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
  10. package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
  11. package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
  12. package/model-function/generate-structure/streamStructure.cjs +7 -8
  13. package/model-function/generate-structure/streamStructure.d.ts +1 -1
  14. package/model-function/generate-structure/streamStructure.js +7 -8
  15. package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
  16. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
  17. package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
  18. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
  19. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
  20. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
  21. package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
  22. package/model-function/generate-text/index.cjs +1 -0
  23. package/model-function/generate-text/index.d.ts +1 -0
  24. package/model-function/generate-text/index.js +1 -0
  25. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -1
  26. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +2 -2
  27. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +2 -1
  28. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +9 -5
  29. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.d.ts +4 -4
  30. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +9 -5
  31. package/model-function/generate-text/prompt-template/ChatPrompt.cjs +38 -20
  32. package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +33 -34
  33. package/model-function/generate-text/prompt-template/ChatPrompt.js +37 -18
  34. package/model-function/generate-text/prompt-template/ContentPart.cjs +11 -0
  35. package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
  36. package/model-function/generate-text/prompt-template/ContentPart.js +7 -0
  37. package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +7 -22
  38. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +40 -6
  39. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +16 -4
  40. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +38 -5
  41. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +10 -5
  42. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.d.ts +4 -4
  43. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +10 -5
  44. package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -5
  45. package/model-function/generate-text/prompt-template/TextPromptTemplate.d.ts +4 -4
  46. package/model-function/generate-text/prompt-template/TextPromptTemplate.js +8 -5
  47. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +8 -4
  48. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +2 -2
  49. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +8 -4
  50. package/model-function/generate-text/prompt-template/index.cjs +1 -1
  51. package/model-function/generate-text/prompt-template/index.d.ts +1 -1
  52. package/model-function/generate-text/prompt-template/index.js +1 -1
  53. package/model-function/generate-text/prompt-template/trimChatPrompt.cjs +0 -2
  54. package/model-function/generate-text/prompt-template/trimChatPrompt.d.ts +4 -4
  55. package/model-function/generate-text/prompt-template/trimChatPrompt.js +0 -2
  56. package/model-function/generate-text/streamText.cjs +27 -28
  57. package/model-function/generate-text/streamText.d.ts +1 -0
  58. package/model-function/generate-text/streamText.js +27 -28
  59. package/model-provider/anthropic/AnthropicPromptTemplate.cjs +9 -4
  60. package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +4 -4
  61. package/model-provider/anthropic/AnthropicPromptTemplate.js +9 -4
  62. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
  63. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +13 -4
  64. package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
  65. package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
  66. package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
  67. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
  68. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
  69. package/model-provider/cohere/CohereTextGenerationModel.d.ts +47 -13
  70. package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
  71. package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
  72. package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
  73. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
  74. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
  75. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +29 -17
  76. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -4
  77. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +29 -17
  78. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
  79. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +157 -6
  80. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
  81. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
  82. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts +1 -0
  83. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
  84. package/model-provider/mistral/MistralChatModel.cjs +30 -104
  85. package/model-provider/mistral/MistralChatModel.d.ts +49 -16
  86. package/model-provider/mistral/MistralChatModel.js +30 -104
  87. package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
  88. package/model-provider/mistral/MistralChatModel.test.d.ts +1 -0
  89. package/model-provider/mistral/MistralChatModel.test.js +49 -0
  90. package/model-provider/mistral/MistralPromptTemplate.cjs +13 -5
  91. package/model-provider/mistral/MistralPromptTemplate.d.ts +4 -4
  92. package/model-provider/mistral/MistralPromptTemplate.js +13 -5
  93. package/model-provider/ollama/OllamaChatModel.cjs +7 -43
  94. package/model-provider/ollama/OllamaChatModel.d.ts +63 -11
  95. package/model-provider/ollama/OllamaChatModel.js +7 -43
  96. package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
  97. package/model-provider/ollama/OllamaChatModel.test.d.ts +1 -0
  98. package/model-provider/ollama/OllamaChatModel.test.js +25 -0
  99. package/model-provider/ollama/OllamaChatPromptTemplate.cjs +43 -17
  100. package/model-provider/ollama/OllamaChatPromptTemplate.d.ts +4 -4
  101. package/model-provider/ollama/OllamaChatPromptTemplate.js +43 -17
  102. package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
  103. package/model-provider/ollama/OllamaCompletionModel.d.ts +65 -9
  104. package/model-provider/ollama/OllamaCompletionModel.js +23 -44
  105. package/model-provider/ollama/OllamaCompletionModel.test.cjs +101 -13
  106. package/model-provider/ollama/OllamaCompletionModel.test.js +78 -13
  107. package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
  108. package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
  109. package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
  110. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
  111. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
  112. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
  113. package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
  114. package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
  115. package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
  116. package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
  117. package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
  118. package/model-provider/openai/OpenAIChatModel.test.d.ts +1 -0
  119. package/model-provider/openai/OpenAIChatModel.test.js +92 -0
  120. package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
  121. package/model-provider/openai/OpenAIChatPromptTemplate.d.ts +20 -0
  122. package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
  123. package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
  124. package/model-provider/openai/OpenAICompletionModel.d.ts +29 -12
  125. package/model-provider/openai/OpenAICompletionModel.js +33 -85
  126. package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
  127. package/model-provider/openai/OpenAICompletionModel.test.d.ts +1 -0
  128. package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
  129. package/model-provider/openai/OpenAICostCalculator.cjs +1 -1
  130. package/model-provider/openai/OpenAICostCalculator.js +1 -1
  131. package/model-provider/openai/OpenAIFacade.cjs +2 -2
  132. package/model-provider/openai/OpenAIFacade.d.ts +3 -3
  133. package/model-provider/openai/OpenAIFacade.js +2 -2
  134. package/model-provider/openai/OpenAITranscriptionModel.d.ts +6 -6
  135. package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
  136. package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
  137. package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
  138. package/model-provider/openai/index.cjs +6 -6
  139. package/model-provider/openai/index.d.ts +5 -6
  140. package/model-provider/openai/index.js +5 -5
  141. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
  142. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
  143. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
  144. package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
  145. package/package.json +5 -5
  146. package/test/JsonTestServer.cjs +33 -0
  147. package/test/JsonTestServer.d.ts +7 -0
  148. package/test/JsonTestServer.js +29 -0
  149. package/test/StreamingTestServer.cjs +55 -0
  150. package/test/StreamingTestServer.d.ts +7 -0
  151. package/test/StreamingTestServer.js +51 -0
  152. package/test/arrayFromAsync.cjs +13 -0
  153. package/test/arrayFromAsync.d.ts +1 -0
  154. package/test/arrayFromAsync.js +9 -0
  155. package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
  156. package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
  157. package/util/streaming/createEventSourceResponseHandler.js +5 -0
  158. package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
  159. package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
  160. package/util/streaming/createJsonStreamResponseHandler.js +5 -0
  161. package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
  162. package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
  163. package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
  164. package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
  165. package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
  166. package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
  167. package/model-function/generate-text/prompt-template/Content.cjs +0 -2
  168. package/model-function/generate-text/prompt-template/Content.d.ts +0 -20
  169. package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
  170. package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
  171. package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -72
  172. package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +0 -20
  173. package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -65
  174. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
  175. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
  176. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
  177. /package/{model-function/generate-text/prompt-template/Content.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
  178. /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → cohere/CohereTextGenerationModel.test.d.ts} +0 -0
  179. /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
  180. /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
  181. /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0

package/model-provider/anthropic/AnthropicPromptTemplate.cjs
@@ -1,7 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat = exports.instruction = exports.text = void 0;
-const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-template/ChatPrompt.cjs");
+const ContentPart_js_1 = require("../../model-function/generate-text/prompt-template/ContentPart.cjs");
+const InvalidPromptError_js_1 = require("../../model-function/generate-text/prompt-template/InvalidPromptError.cjs");
 const HUMAN_PREFIX = "\n\nHuman:";
 const ASSISTANT_PREFIX = "\n\nAssistant:";
 /**
@@ -26,9 +27,10 @@ exports.text = text;
 function instruction() {
     return {
         format(prompt) {
+            const instruction = (0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt);
             let text = prompt.system ?? "";
             text += HUMAN_PREFIX;
-            text += prompt.instruction;
+            text += instruction;
             text += ASSISTANT_PREFIX;
             if (prompt.responsePrefix != null) {
                 text += prompt.responsePrefix;
@@ -47,13 +49,13 @@ exports.instruction = instruction;
 function chat() {
     return {
         format(prompt) {
-            (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
             let text = prompt.system ?? "";
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
+                        const textContent = (0, ContentPart_js_1.validateContentIsString)(content, prompt);
                         text += HUMAN_PREFIX;
-                        text += content;
+                        text += textContent;
                         break;
                     }
                     case "assistant": {
@@ -61,6 +63,9 @@ function chat() {
                         text += content;
                         break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError_js_1.InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);

package/model-provider/anthropic/AnthropicPromptTemplate.d.ts
@@ -1,6 +1,6 @@
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
-import { TextChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
-import { TextInstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+import { ChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
 /**
  * Formats a text prompt as an Anthropic prompt.
  */
@@ -8,10 +8,10 @@ export declare function text(): TextGenerationPromptTemplate<string, string>;
 /**
  * Formats an instruction prompt as an Anthropic prompt.
  */
-export declare function instruction(): TextGenerationPromptTemplate<TextInstructionPrompt, string>;
+export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt, string>;
 /**
  * Formats a chat prompt as an Anthropic prompt.
  *
  * @see https://docs.anthropic.com/claude/docs/constructing-a-prompt
  */
-export declare function chat(): TextGenerationPromptTemplate<TextChatPrompt, string>;
+export declare function chat(): TextGenerationPromptTemplate<ChatPrompt, string>;

package/model-provider/anthropic/AnthropicPromptTemplate.js
@@ -1,4 +1,5 @@
-import { validateChatPrompt, } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+import { validateContentIsString } from "../../model-function/generate-text/prompt-template/ContentPart.js";
+import { InvalidPromptError } from "../../model-function/generate-text/prompt-template/InvalidPromptError.js";
 const HUMAN_PREFIX = "\n\nHuman:";
 const ASSISTANT_PREFIX = "\n\nAssistant:";
 /**
@@ -22,9 +23,10 @@ export function text() {
 export function instruction() {
     return {
         format(prompt) {
+            const instruction = validateContentIsString(prompt.instruction, prompt);
             let text = prompt.system ?? "";
             text += HUMAN_PREFIX;
-            text += prompt.instruction;
+            text += instruction;
             text += ASSISTANT_PREFIX;
             if (prompt.responsePrefix != null) {
                 text += prompt.responsePrefix;
@@ -42,13 +44,13 @@ export function instruction() {
 export function chat() {
     return {
         format(prompt) {
-            validateChatPrompt(prompt);
             let text = prompt.system ?? "";
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
+                        const textContent = validateContentIsString(content, prompt);
                         text += HUMAN_PREFIX;
-                        text += content;
+                        text += textContent;
                         break;
                     }
                     case "assistant": {
@@ -56,6 +58,9 @@ export function chat() {
                         text += content;
                         break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
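
The net effect of the three AnthropicPromptTemplate diffs above: prompt content is now validated to be plain text, and chat prompts containing tool messages are rejected up front. A minimal TypeScript sketch of the new behavior (not the package's own code; the prompt shapes are inferred from this diff, and the tail of format(), which appends the final Assistant prefix, lies outside these hunks):

// Sketch only. chat() as exported by AnthropicPromptTemplate above.
const template = chat();

// String content still formats into the Human/Assistant transcript:
const formatted = template.format({
  system: "You are concise.",
  messages: [{ role: "user", content: "Hello" }],
});
// => roughly "You are concise.\n\nHuman:Hello\n\nAssistant:"
//    (the final prefix is added by the part of format() not shown here)

// A "tool" message now fails fast instead of being silently concatenated:
template.format({
  messages: [{ role: "tool", content: [] }], // message shape assumed
});
// => throws InvalidPromptError: "Tool messages are not supported."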

package/model-provider/anthropic/AnthropicTextGenerationModel.cjs
@@ -85,7 +85,7 @@ class AnthropicTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             model: this.settings.model,
             prompt,
             stream: responseFormat.stream,
-            max_tokens_to_sample: this.settings.maxGenerationTokens,
+            max_tokens_to_sample: this.settings.maxGenerationTokens ?? 100,
             temperature: this.settings.temperature,
             top_k: this.settings.topK,
             top_p: this.settings.topP,
@@ -140,6 +140,10 @@ class AnthropicTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.AnthropicTextGenerationResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.completion;
+    }
     /**
      * Returns this model with a text prompt template.
      */
@@ -179,14 +183,13 @@ const anthropicTextGenerationResponseSchema = zod_1.z.object({
     stop_reason: zod_1.z.string(),
     model: zod_1.z.string(),
 });
-const anthropicTextStreamingResponseSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
+const anthropicTextStreamChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
     completion: zod_1.z.string(),
     stop_reason: zod_1.z.string().nullable(),
     model: zod_1.z.string(),
 }));
 async function createAnthropicFullDeltaIterableQueue(stream) {
     const queue = new AsyncQueue_js_1.AsyncQueue();
-    let content = "";
     // process the stream asynchonously (no 'await' on purpose):
     (0, parseEventSourceStream_js_1.parseEventSourceStream)({ stream })
         .then(async (events) => {
@@ -203,18 +206,9 @@ async function createAnthropicFullDeltaIterableQueue(stream) {
                 const data = event.data;
                 const eventData = (0, parseJSON_js_1.parseJSON)({
                     text: data,
-                    schema: anthropicTextStreamingResponseSchema,
-                });
-                content += eventData.completion;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content,
-                        isComplete: eventData.stop_reason != null,
-                        delta: eventData.completion,
-                    },
-                    valueDelta: eventData.completion,
+                    schema: anthropicTextStreamChunkSchema,
                 });
+                queue.push({ type: "delta", deltaValue: eventData });
                 if (eventData.stop_reason != null) {
                     queue.close();
                 }

package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts
@@ -62,7 +62,12 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
         }[];
     }>;
     private translateFinishReason;
-    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
+    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<{
+        model: string;
+        completion: string;
+        stop_reason: string | null;
+    }>>>;
+    extractTextDelta(delta: unknown): string;
     /**
      * Returns this model with a text prompt template.
      */
@@ -70,11 +75,11 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
     /**
      * Returns this model with an instruction prompt template.
     */
-    withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
+    withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
     /**
      * Returns this model with a chat prompt template.
     */
-    withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
+    withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, AnthropicTextGenerationModelSettings, this>;
     withSettings(additionalSettings: Partial<AnthropicTextGenerationModelSettings>): this;
 }
@@ -116,7 +121,11 @@ export declare const AnthropicTextGenerationResponseFormat: {
         stream: true;
         handler: ({ response }: {
             response: Response;
-        }) => Promise<AsyncIterable<Delta<string>>>;
+        }) => Promise<AsyncIterable<Delta<{
+            model: string;
+            completion: string;
+            stop_reason: string | null;
+        }>>>;
     };
 };
 export {};

package/model-provider/anthropic/AnthropicTextGenerationModel.js
@@ -82,7 +82,7 @@ export class AnthropicTextGenerationModel extends AbstractModel {
             model: this.settings.model,
             prompt,
             stream: responseFormat.stream,
-            max_tokens_to_sample: this.settings.maxGenerationTokens,
+            max_tokens_to_sample: this.settings.maxGenerationTokens ?? 100,
             temperature: this.settings.temperature,
             top_k: this.settings.topK,
             top_p: this.settings.topP,
@@ -137,6 +137,10 @@ export class AnthropicTextGenerationModel extends AbstractModel {
             responseFormat: AnthropicTextGenerationResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.completion;
+    }
     /**
      * Returns this model with a text prompt template.
      */
@@ -175,14 +179,13 @@ const anthropicTextGenerationResponseSchema = z.object({
     stop_reason: z.string(),
     model: z.string(),
 });
-const anthropicTextStreamingResponseSchema = new ZodSchema(z.object({
+const anthropicTextStreamChunkSchema = new ZodSchema(z.object({
     completion: z.string(),
     stop_reason: z.string().nullable(),
     model: z.string(),
 }));
 async function createAnthropicFullDeltaIterableQueue(stream) {
     const queue = new AsyncQueue();
-    let content = "";
     // process the stream asynchonously (no 'await' on purpose):
     parseEventSourceStream({ stream })
         .then(async (events) => {
@@ -199,18 +202,9 @@ async function createAnthropicFullDeltaIterableQueue(stream) {
                 const data = event.data;
                 const eventData = parseJSON({
                     text: data,
-                    schema: anthropicTextStreamingResponseSchema,
-                });
-                content += eventData.completion;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content,
-                        isComplete: eventData.stop_reason != null,
-                        delta: eventData.completion,
-                    },
-                    valueDelta: eventData.completion,
+                    schema: anthropicTextStreamChunkSchema,
                 });
+                queue.push({ type: "delta", deltaValue: eventData });
                 if (eventData.stop_reason != null) {
                     queue.close();
                 }
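
The streaming rework in both AnthropicTextGenerationModel builds above changes what flows through the delta queue: instead of an accumulated fullDelta object, each item now carries the raw parsed chunk as deltaValue, and extractTextDelta() maps one chunk to its text fragment. A sketch of the resulting data flow (the Delta and chunk types are transcribed from the .d.ts diff above; toTextStream is an illustrative stand-in for what streamText() does internally, not a package export):

// Chunk type as declared in AnthropicTextGenerationModel.d.ts in this release.
type AnthropicStreamChunk = {
  model: string;
  completion: string;
  stop_reason: string | null;
};

// Delta shape as pushed onto the AsyncQueue in this release.
type Delta<T> = { type: "delta"; deltaValue: T };

// Mirrors the new extractTextDelta() method.
function extractTextDelta(delta: unknown): string {
  const chunk = delta as AnthropicStreamChunk;
  return chunk.completion;
}

// Illustrative consumer: turn the raw chunk stream into a text stream.
async function* toTextStream(
  deltas: AsyncIterable<Delta<AnthropicStreamChunk>>
): AsyncGenerator<string> {
  for await (const delta of deltas) {
    yield extractTextDelta(delta.deltaValue);
  }
}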

package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
+const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
+const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
+const AnthropicApiConfiguration_js_1 = require("./AnthropicApiConfiguration.cjs");
+const AnthropicTextGenerationModel_js_1 = require("./AnthropicTextGenerationModel.cjs");
+describe("streamText", () => {
+    const server = new StreamingTestServer_js_1.StreamingTestServer("https://api.anthropic.com/v1/complete");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":" Hello","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":", ","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"world!","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: ping\ndata: {"type": "ping"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"","stop_reason":"stop_sequence","model":"claude-instant-1.2",` +
+                `"stop":"\\n\\nHuman:","log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+        ];
+        const stream = await (0, streamText_js_1.streamText)(new AnthropicTextGenerationModel_js_1.AnthropicTextGenerationModel({
+            api: new AnthropicApiConfiguration_js_1.AnthropicApiConfiguration({
+                apiKey: "test-key",
+            }),
+            model: "claude-instant-1",
+        }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});

package/model-provider/anthropic/AnthropicTextGenerationModel.test.js
@@ -0,0 +1,42 @@
+import { streamText } from "../../model-function/generate-text/streamText.js";
+import { StreamingTestServer } from "../../test/StreamingTestServer.js";
+import { arrayFromAsync } from "../../test/arrayFromAsync.js";
+import { AnthropicApiConfiguration } from "./AnthropicApiConfiguration.js";
+import { AnthropicTextGenerationModel } from "./AnthropicTextGenerationModel.js";
+describe("streamText", () => {
+    const server = new StreamingTestServer("https://api.anthropic.com/v1/complete");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":" Hello","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":", ","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"world!","stop_reason":null,"model":"claude-instant-1.2",` +
+                `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+            `event: ping\ndata: {"type": "ping"}\n\n`,
+            `event: completion\n` +
+                `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
+                `"completion":"","stop_reason":"stop_sequence","model":"claude-instant-1.2",` +
+                `"stop":"\\n\\nHuman:","log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
+        ];
+        const stream = await streamText(new AnthropicTextGenerationModel({
+            api: new AnthropicApiConfiguration({
+                apiKey: "test-key",
+            }),
+            model: "claude-instant-1",
+        }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await arrayFromAsync(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});

package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts
@@ -49,7 +49,7 @@ export declare class Automatic1111ImageGenerationModel extends AbstractModel<Aut
     }>;
     withTextPrompt(): PromptTemplateImageGenerationModel<string, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, Automatic1111ImageGenerationPrompt>): PromptTemplateImageGenerationModel<INPUT_PROMPT, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
-    withSettings(additionalSettings: Automatic1111ImageGenerationSettings): this;
+    withSettings(additionalSettings: Partial<Automatic1111ImageGenerationSettings>): this;
 }
 declare const Automatic1111ImageGenerationResponseSchema: z.ZodObject<{
     images: z.ZodArray<z.ZodString, "many">;
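
A small but useful signature fix: withSettings() on the Automatic1111 model now takes Partial<...>, matching the other providers, so a single field can be overridden without restating the full settings object. A hedged usage sketch (model is an assumed existing Automatic1111ImageGenerationModel instance, and steps is an assumed settings field name):

// Before 0.105.0 the complete settings object was required;
// now any subset type-checks.
const fasterModel = model.withSettings({ steps: 10 }); // `steps` assumed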

package/model-provider/cohere/CohereTextGenerationModel.cjs
@@ -10,8 +10,7 @@ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/gene
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const TextPromptTemplate_js_1 = require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs");
 const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
-const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
-const parseJsonStream_js_1 = require("../../util/streaming/parseJsonStream.cjs");
+const createJsonStreamResponseHandler_js_1 = require("../../util/streaming/createJsonStreamResponseHandler.cjs");
 const CohereApiConfiguration_js_1 = require("./CohereApiConfiguration.cjs");
 const CohereError_js_1 = require("./CohereError.cjs");
 const CohereTokenizer_js_1 = require("./CohereTokenizer.cjs");
@@ -163,8 +162,9 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.CohereTextGenerationResponseFormat.deltaIterable,
         });
     }
-    extractTextDelta(fullDelta) {
-        return fullDelta.delta;
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.is_finished === true ? "" : chunk.text;
     }
     /**
      * Returns this model with an instruction prompt template.
@@ -210,7 +210,7 @@ const cohereTextGenerationResponseSchema = zod_1.z.object({
     })
         .optional(),
 });
-const cohereTextStreamingResponseSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.discriminatedUnion("is_finished", [
+const cohereTextStreamChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.discriminatedUnion("is_finished", [
     zod_1.z.object({
         text: zod_1.z.string(),
         is_finished: zod_1.z.literal(false),
@@ -221,44 +221,6 @@ const cohereTextStreamingResponseSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.d
         response: cohereTextGenerationResponseSchema,
     }),
 ]));
-async function createCohereTextGenerationFullDeltaIterableQueue(stream) {
-    const queue = new AsyncQueue_js_1.AsyncQueue();
-    let accumulatedText = "";
-    // process the stream asynchonously (no 'await' on purpose):
-    (0, parseJsonStream_js_1.parseJsonStream)({
-        stream,
-        schema: cohereTextStreamingResponseSchema,
-        process(event) {
-            if (event.is_finished === true) {
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: true,
-                        delta: "",
-                    },
-                    valueDelta: "",
-                });
-            }
-            else {
-                accumulatedText += event.text;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: false,
-                        delta: event.text,
-                    },
-                    valueDelta: event.text,
-                });
-            }
-        },
-        onDone() {
-            queue.close();
-        },
-    });
-    return queue;
-}
 exports.CohereTextGenerationResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -273,6 +235,6 @@ exports.CohereTextGenerationResponseFormat = {
      */
     deltaIterable: {
         stream: true,
-        handler: async ({ response }) => createCohereTextGenerationFullDeltaIterableQueue(response.body),
+        handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)(cohereTextStreamChunkSchema),
     },
 };

package/model-provider/cohere/CohereTextGenerationModel.d.ts
@@ -3,7 +3,6 @@ import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { Delta } from "../../model-function/Delta.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
@@ -86,19 +85,39 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
         }[];
     }>;
     private translateFinishReason;
-    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
-    extractTextDelta(fullDelta: CohereTextGenerationDelta): string | undefined;
+    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+        text: string;
+        is_finished: false;
+    } | {
+        response: {
+            prompt: string;
+            id: string;
+            generations: {
+                text: string;
+                id: string;
+                finish_reason?: string | undefined;
+            }[];
+            meta?: {
+                api_version: {
+                    version: string;
+                };
+            } | undefined;
+        };
+        finish_reason: string;
+        is_finished: true;
+    }>>>;
+    extractTextDelta(delta: unknown): string;
     /**
      * Returns this model with an instruction prompt template.
     */
-    withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, CohereTextGenerationModelSettings, this>;
+    withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, string, CohereTextGenerationModelSettings, this>;
     /**
      * Returns this model with a chat prompt template.
     */
     withChatPrompt(options?: {
         user?: string;
         assistant?: string;
-    }): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, string, CohereTextGenerationModelSettings, this>;
+    }): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, string, CohereTextGenerationModelSettings, this>;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
     withSettings(additionalSettings: Partial<CohereTextGenerationModelSettings>): this;
 }
@@ -163,11 +182,6 @@ declare const cohereTextGenerationResponseSchema: z.ZodObject<{
     } | undefined;
 }>;
 export type CohereTextGenerationResponse = z.infer<typeof cohereTextGenerationResponseSchema>;
-export type CohereTextGenerationDelta = {
-    content: string;
-    isComplete: boolean;
-    delta: string;
-};
 export type CohereTextGenerationResponseFormatType<T> = {
     stream: boolean;
     handler: ResponseHandler<T>;
@@ -177,7 +191,7 @@ export declare const CohereTextGenerationResponseFormat: {
     * Returns the response as a JSON object.
     */
    json: {
-        stream: false;
+        stream: boolean;
        handler: ResponseHandler<{
            prompt: string;
            id: string;
@@ -198,10 +212,30 @@
     * of the response stream.
     */
    deltaIterable: {
-        stream: true;
+        stream: boolean;
        handler: ({ response }: {
            response: Response;
-        }) => Promise<AsyncIterable<Delta<string>>>;
+        }) => Promise<AsyncIterable<import("../../index.js").Delta<{
+            text: string;
+            is_finished: false;
+        } | {
+            response: {
+                prompt: string;
+                id: string;
+                generations: {
+                    text: string;
+                    id: string;
+                    finish_reason?: string | undefined;
+                }[];
+                meta?: {
+                    api_version: {
+                        version: string;
+                    };
+                } | undefined;
+            };
+            finish_reason: string;
+            is_finished: true;
+        }>>>;
    };
 };
 export {};

package/model-provider/cohere/CohereTextGenerationModel.js
@@ -1,14 +1,13 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
-import { ZodSchema } from "../../core/schema/ZodSchema.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { chat, instruction, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
-import { AsyncQueue } from "../../util/AsyncQueue.js";
-import { parseJsonStream } from "../../util/streaming/parseJsonStream.js";
+import { createJsonStreamResponseHandler } from "../../util/streaming/createJsonStreamResponseHandler.js";
 import { CohereApiConfiguration } from "./CohereApiConfiguration.js";
 import { failedCohereCallResponseHandler } from "./CohereError.js";
 import { CohereTokenizer } from "./CohereTokenizer.js";
@@ -160,8 +159,9 @@ export class CohereTextGenerationModel extends AbstractModel {
             responseFormat: CohereTextGenerationResponseFormat.deltaIterable,
         });
     }
-    extractTextDelta(fullDelta) {
-        return fullDelta.delta;
+    extractTextDelta(delta) {
+        const chunk = delta;
+        return chunk.is_finished === true ? "" : chunk.text;
     }
     /**
      * Returns this model with an instruction prompt template.
@@ -206,7 +206,7 @@ const cohereTextGenerationResponseSchema = z.object({
     })
         .optional(),
 });
-const cohereTextStreamingResponseSchema = new ZodSchema(z.discriminatedUnion("is_finished", [
+const cohereTextStreamChunkSchema = zodSchema(z.discriminatedUnion("is_finished", [
     z.object({
         text: z.string(),
         is_finished: z.literal(false),
@@ -217,44 +217,6 @@ const cohereTextStreamingResponseSchema = new ZodSchema(z.discriminatedUnion("is
         response: cohereTextGenerationResponseSchema,
     }),
 ]));
-async function createCohereTextGenerationFullDeltaIterableQueue(stream) {
-    const queue = new AsyncQueue();
-    let accumulatedText = "";
-    // process the stream asynchonously (no 'await' on purpose):
-    parseJsonStream({
-        stream,
-        schema: cohereTextStreamingResponseSchema,
-        process(event) {
-            if (event.is_finished === true) {
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: true,
-                        delta: "",
-                    },
-                    valueDelta: "",
-                });
-            }
-            else {
-                accumulatedText += event.text;
-                queue.push({
-                    type: "delta",
-                    fullDelta: {
-                        content: accumulatedText,
-                        isComplete: false,
-                        delta: event.text,
-                    },
-                    valueDelta: event.text,
-                });
-            }
-        },
-        onDone() {
-            queue.close();
-        },
-    });
-    return queue;
-}
 export const CohereTextGenerationResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -269,6 +231,6 @@ export const CohereTextGenerationResponseFormat = {
      */
     deltaIterable: {
         stream: true,
-        handler: async ({ response }) => createCohereTextGenerationFullDeltaIterableQueue(response.body),
+        handler: createJsonStreamResponseHandler(cohereTextStreamChunkSchema),
     },
 };
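
Both Cohere builds above swap roughly forty lines of hand-rolled AsyncQueue plumbing for the shared createJsonStreamResponseHandler helper paired with a chunk schema. The helper itself is not shown in this section; judging from its call sites here and the new util/streaming files in the changed-files list, it plausibly just binds the schema to the generic JSON-stream parser. An assumed sketch, not the actual implementation (import paths and the parseJsonStreamAsAsyncIterable parameter names are inferred):

import { parseJsonStreamAsAsyncIterable } from "./parseJsonStreamAsAsyncIterable.js";
import { Schema } from "../../core/schema/Schema.js"; // assumed import path

// Assumed shape: given a schema for one stream chunk, return a response
// handler that parses the body as a JSON stream of such chunks.
export function createJsonStreamResponseHandler<CHUNK>(schema: Schema<CHUNK>) {
  return ({ response }: { response: Response }) =>
    parseJsonStreamAsAsyncIterable({
      stream: response.body!, // streaming responses carry a body
      schema,
    });
}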