modelfusion 0.104.0 → 0.105.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (164)
  1. package/CHANGELOG.md +49 -0
  2. package/model-function/Delta.d.ts +1 -2
  3. package/model-function/executeStreamCall.cjs +6 -4
  4. package/model-function/executeStreamCall.d.ts +2 -2
  5. package/model-function/executeStreamCall.js +6 -4
  6. package/model-function/generate-speech/streamSpeech.cjs +1 -2
  7. package/model-function/generate-speech/streamSpeech.js +1 -2
  8. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
  9. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
  10. package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
  11. package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
  12. package/model-function/generate-structure/streamStructure.cjs +7 -8
  13. package/model-function/generate-structure/streamStructure.d.ts +1 -1
  14. package/model-function/generate-structure/streamStructure.js +7 -8
  15. package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
  16. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
  17. package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
  18. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
  19. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
  20. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
  21. package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
  22. package/model-function/generate-text/index.cjs +1 -0
  23. package/model-function/generate-text/index.d.ts +1 -0
  24. package/model-function/generate-text/index.js +1 -0
  25. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
  26. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
  27. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
  28. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
  29. package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
  30. package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
  31. package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
  32. package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
  33. package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
  34. package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
  35. package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
  36. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +7 -4
  37. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +5 -2
  38. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
  39. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
  40. package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
  41. package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
  42. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
  43. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
  44. package/model-function/generate-text/prompt-template/index.cjs +1 -1
  45. package/model-function/generate-text/prompt-template/index.d.ts +1 -1
  46. package/model-function/generate-text/prompt-template/index.js +1 -1
  47. package/model-function/generate-text/streamText.cjs +27 -28
  48. package/model-function/generate-text/streamText.d.ts +1 -0
  49. package/model-function/generate-text/streamText.js +27 -28
  50. package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
  51. package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
  52. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
  53. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
  54. package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
  55. package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
  56. package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
  57. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
  58. package/model-provider/cohere/CohereTextGenerationModel.d.ts +45 -11
  59. package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
  60. package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
  61. package/model-provider/cohere/CohereTextGenerationModel.test.d.ts +1 -0
  62. package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
  63. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
  64. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
  65. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
  66. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
  67. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
  68. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +157 -6
  69. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
  70. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
  71. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts +1 -0
  72. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
  73. package/model-provider/mistral/MistralChatModel.cjs +30 -104
  74. package/model-provider/mistral/MistralChatModel.d.ts +47 -14
  75. package/model-provider/mistral/MistralChatModel.js +30 -104
  76. package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
  77. package/model-provider/mistral/MistralChatModel.test.d.ts +1 -0
  78. package/model-provider/mistral/MistralChatModel.test.js +49 -0
  79. package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
  80. package/model-provider/mistral/MistralPromptTemplate.js +9 -2
  81. package/model-provider/ollama/OllamaChatModel.cjs +7 -43
  82. package/model-provider/ollama/OllamaChatModel.d.ts +61 -9
  83. package/model-provider/ollama/OllamaChatModel.js +7 -43
  84. package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
  85. package/model-provider/ollama/OllamaChatModel.test.d.ts +1 -0
  86. package/model-provider/ollama/OllamaChatModel.test.js +25 -0
  87. package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
  88. package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
  89. package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
  90. package/model-provider/ollama/OllamaCompletionModel.d.ts +65 -9
  91. package/model-provider/ollama/OllamaCompletionModel.js +23 -44
  92. package/model-provider/ollama/OllamaCompletionModel.test.cjs +101 -13
  93. package/model-provider/ollama/OllamaCompletionModel.test.js +78 -13
  94. package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
  95. package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
  96. package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
  97. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
  98. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
  99. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
  100. package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
  101. package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
  102. package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
  103. package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
  104. package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
  105. package/model-provider/openai/OpenAIChatModel.test.d.ts +1 -0
  106. package/model-provider/openai/OpenAIChatModel.test.js +92 -0
  107. package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
  108. package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
  109. package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
  110. package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
  111. package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
  112. package/model-provider/openai/OpenAICompletionModel.js +33 -85
  113. package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
  114. package/model-provider/openai/OpenAICompletionModel.test.d.ts +1 -0
  115. package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
  116. package/model-provider/openai/OpenAICostCalculator.cjs +1 -1
  117. package/model-provider/openai/OpenAICostCalculator.js +1 -1
  118. package/model-provider/openai/OpenAIFacade.cjs +2 -2
  119. package/model-provider/openai/OpenAIFacade.d.ts +3 -3
  120. package/model-provider/openai/OpenAIFacade.js +2 -2
  121. package/model-provider/openai/OpenAITranscriptionModel.d.ts +6 -6
  122. package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
  123. package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
  124. package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
  125. package/model-provider/openai/index.cjs +6 -6
  126. package/model-provider/openai/index.d.ts +5 -6
  127. package/model-provider/openai/index.js +5 -5
  128. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
  129. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
  130. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
  131. package/package.json +5 -5
  132. package/test/JsonTestServer.cjs +33 -0
  133. package/test/JsonTestServer.d.ts +7 -0
  134. package/test/JsonTestServer.js +29 -0
  135. package/test/StreamingTestServer.cjs +55 -0
  136. package/test/StreamingTestServer.d.ts +7 -0
  137. package/test/StreamingTestServer.js +51 -0
  138. package/test/arrayFromAsync.cjs +13 -0
  139. package/test/arrayFromAsync.d.ts +1 -0
  140. package/test/arrayFromAsync.js +9 -0
  141. package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
  142. package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
  143. package/util/streaming/createEventSourceResponseHandler.js +5 -0
  144. package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
  145. package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
  146. package/util/streaming/createJsonStreamResponseHandler.js +5 -0
  147. package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
  148. package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
  149. package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
  150. package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
  151. package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
  152. package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
  153. package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
  154. package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
  155. package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
  156. package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
  157. package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
  158. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
  159. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
  160. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
  161. /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
  162. /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
  163. /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
  164. /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0

package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs}
@@ -1,8 +1,8 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.countOpenAIChatPromptTokens = exports.countOpenAIChatMessageTokens = exports.OPENAI_CHAT_MESSAGE_BASE_TOKEN_COUNT = exports.OPENAI_CHAT_PROMPT_BASE_TOKEN_COUNT = void 0;
- const countTokens_js_1 = require("../../../model-function/tokenize-text/countTokens.cjs");
- const TikTokenTokenizer_js_1 = require("../TikTokenTokenizer.cjs");
+ const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
+ const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
  const OpenAIChatModel_js_1 = require("./OpenAIChatModel.cjs");
  /**
   * Prompt tokens that are included automatically for every full

package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js}
@@ -1,5 +1,5 @@
- import { countTokens } from "../../../model-function/tokenize-text/countTokens.js";
- import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
+ import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
+ import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
  import { getOpenAIChatModelInformation, } from "./OpenAIChatModel.js";
  /**
   * Prompt tokens that are included automatically for every full

package/model-provider/openai/index.cjs
@@ -26,9 +26,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.OpenAIChatPrompt = exports.openai = void 0;
+ exports.openai = exports.OpenAIChatPrompt = void 0;
+ __exportStar(require("./AbstractOpenAIChatModel.cjs"), exports);
  __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
  __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
+ __exportStar(require("./OpenAIChatMessage.cjs"), exports);
+ __exportStar(require("./OpenAIChatModel.cjs"), exports);
+ exports.OpenAIChatPrompt = __importStar(require("./OpenAIChatPromptTemplate.cjs"));
  __exportStar(require("./OpenAICompletionModel.cjs"), exports);
  __exportStar(require("./OpenAICostCalculator.cjs"), exports);
  exports.openai = __importStar(require("./OpenAIFacade.cjs"));
@@ -37,8 +41,4 @@ __exportStar(require("./OpenAISpeechModel.cjs"), exports);
  __exportStar(require("./OpenAITextEmbeddingModel.cjs"), exports);
  __exportStar(require("./OpenAITranscriptionModel.cjs"), exports);
  __exportStar(require("./TikTokenTokenizer.cjs"), exports);
- __exportStar(require("./chat/AbstractOpenAIChatModel.cjs"), exports);
- __exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
- __exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
- exports.OpenAIChatPrompt = __importStar(require("./chat/OpenAIChatPromptTemplate.cjs"));
- __exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
+ __exportStar(require("./countOpenAIChatMessageTokens.cjs"), exports);

package/model-provider/openai/index.d.ts
@@ -1,5 +1,9 @@
+ export * from "./AbstractOpenAIChatModel.js";
  export * from "./AzureOpenAIApiConfiguration.js";
  export * from "./OpenAIApiConfiguration.js";
+ export * from "./OpenAIChatMessage.js";
+ export * from "./OpenAIChatModel.js";
+ export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
  export * from "./OpenAICompletionModel.js";
  export * from "./OpenAICostCalculator.js";
  export { OpenAIErrorData } from "./OpenAIError.js";
@@ -9,9 +13,4 @@ export * from "./OpenAISpeechModel.js";
  export * from "./OpenAITextEmbeddingModel.js";
  export * from "./OpenAITranscriptionModel.js";
  export * from "./TikTokenTokenizer.js";
- export * from "./chat/AbstractOpenAIChatModel.js";
- export * from "./chat/OpenAIChatMessage.js";
- export * from "./chat/OpenAIChatModel.js";
- export * as OpenAIChatPrompt from "./chat/OpenAIChatPromptTemplate.js";
- export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
- export * from "./chat/countOpenAIChatMessageTokens.js";
+ export * from "./countOpenAIChatMessageTokens.js";

package/model-provider/openai/index.js
@@ -1,5 +1,9 @@
+ export * from "./AbstractOpenAIChatModel.js";
  export * from "./AzureOpenAIApiConfiguration.js";
  export * from "./OpenAIApiConfiguration.js";
+ export * from "./OpenAIChatMessage.js";
+ export * from "./OpenAIChatModel.js";
+ export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
  export * from "./OpenAICompletionModel.js";
  export * from "./OpenAICostCalculator.js";
  export * as openai from "./OpenAIFacade.js";
@@ -8,8 +12,4 @@ export * from "./OpenAISpeechModel.js";
  export * from "./OpenAITextEmbeddingModel.js";
  export * from "./OpenAITranscriptionModel.js";
  export * from "./TikTokenTokenizer.js";
- export * from "./chat/AbstractOpenAIChatModel.js";
- export * from "./chat/OpenAIChatMessage.js";
- export * from "./chat/OpenAIChatModel.js";
- export * as OpenAIChatPrompt from "./chat/OpenAIChatPromptTemplate.js";
- export * from "./chat/countOpenAIChatMessageTokens.js";
+ export * from "./countOpenAIChatMessageTokens.js";
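
The three barrel files above flatten model-provider/openai/chat/ into model-provider/openai/. As a hedged illustration (assuming the package root keeps re-exporting this provider barrel, which this diff does not show), consumer-facing imports stay stable; only deep imports into the removed chat/ directory need new paths:

// Minimal sketch, assuming the root "modelfusion" index re-exports this barrel as in 0.104.0.
import { OpenAIChatPrompt } from "modelfusion";

// The namespace re-export above maps to OpenAIChatPromptTemplate.js, so the
// chat()/instruction()/text() templates keep their public names:
const template = OpenAIChatPrompt.chat();
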

package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs
@@ -2,10 +2,10 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAICompatibleChatModel = void 0;
  const StructureFromTextStreamingModel_js_1 = require("../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
- const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
+ const PromptTemplateFullTextModel_js_1 = require("../../model-function/generate-text/PromptTemplateFullTextModel.cjs");
  const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
- const AbstractOpenAIChatModel_js_1 = require("../openai/chat/AbstractOpenAIChatModel.cjs");
- const OpenAIChatPromptTemplate_js_1 = require("../openai/chat/OpenAIChatPromptTemplate.cjs");
+ const AbstractOpenAIChatModel_js_1 = require("../openai/AbstractOpenAIChatModel.cjs");
+ const OpenAIChatPromptTemplate_js_1 = require("../openai/OpenAIChatPromptTemplate.cjs");
  /**
   * Create a text generation model that calls an API that is compatible with OpenAI's chat API.
   *
@@ -82,7 +82,7 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
  }
  withPromptTemplate(promptTemplate) {
- return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
+ return new PromptTemplateFullTextModel_js_1.PromptTemplateFullTextModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),

package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts
@@ -1,11 +1,11 @@
  import { StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
- import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
+ import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
  import { ToolCallsOrTextGenerationModel } from "../../tool/generate-tool-calls-or-text/ToolCallsOrTextGenerationModel.js";
- import { AbstractOpenAIChatCallSettings, AbstractOpenAIChatModel, OpenAIChatPrompt } from "../openai/chat/AbstractOpenAIChatModel.js";
+ import { AbstractOpenAIChatCallSettings, AbstractOpenAIChatModel, OpenAIChatPrompt } from "../openai/AbstractOpenAIChatModel.js";
  export type OpenAICompatibleProviderName = `openaicompatible` | `openaicompatible-${string}`;
  export interface OpenAICompatibleChatSettings extends TextGenerationModelSettings, Omit<AbstractOpenAIChatCallSettings, "stop" | "maxTokens"> {
  provider?: OpenAICompatibleProviderName;
@@ -31,15 +31,15 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
  /**
   * Returns this model with a text prompt template.
   */
- withTextPrompt(): PromptTemplateTextStreamingModel<string, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
+ withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
  /**
   * Returns this model with an instruction prompt template.
   */
- withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
+ withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
  /**
   * Returns this model with a chat prompt template.
   */
- withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
- withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
+ withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
  withSettings(additionalSettings: Partial<OpenAICompatibleChatSettings>): this;
  }

package/model-provider/openai-compatible/OpenAICompatibleChatModel.js
@@ -1,8 +1,8 @@
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
- import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
+ import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
- import { AbstractOpenAIChatModel, } from "../openai/chat/AbstractOpenAIChatModel.js";
- import { chat, instruction, text, } from "../openai/chat/OpenAIChatPromptTemplate.js";
+ import { AbstractOpenAIChatModel, } from "../openai/AbstractOpenAIChatModel.js";
+ import { chat, instruction, text } from "../openai/OpenAIChatPromptTemplate.js";
  /**
   * Create a text generation model that calls an API that is compatible with OpenAI's chat API.
   *
@@ -79,7 +79,7 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
  return this.withPromptTemplate(chat());
  }
  withPromptTemplate(promptTemplate) {
- return new PromptTemplateTextStreamingModel({
+ return new PromptTemplateFullTextModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
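
Across these three files the prompt-template wrapper changes from PromptTemplateTextStreamingModel to the new PromptTemplateFullTextModel. A rough sketch of the visible effect at a call site, assuming the class is exported from the package root as in earlier releases and with the model construction elided (settings are not part of this hunk):

import { OpenAICompatibleChatModel } from "modelfusion";

// construction omitted; provider and API settings are an assumption, not shown in this diff
declare const model: OpenAICompatibleChatModel;

// Per the .d.ts above, this now returns a PromptTemplateFullTextModel instead of a
// PromptTemplateTextStreamingModel; judging by the tool-call imports in that file, the
// new wrapper appears to cover tool-call prompts in addition to text streaming.
const textPromptModel = model.withTextPrompt();
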
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "modelfusion",
  "description": "The TypeScript library for building multi-modal AI applications.",
- "version": "0.104.0",
+ "version": "0.105.0",
  "author": "Lars Grammel",
  "license": "MIT",
  "keywords": [
@@ -61,15 +61,15 @@
  },
  "scripts": {
  "lint": "eslint --ext .ts src",
- "clean": "rimraf build dist .turbo",
+ "clean": "rimraf build dist .turbo node_modules",
  "build": "pnpm build:esm && pnpm build:cjs && pnpm build:copy-files",
  "build:esm": "tsc --outDir dist/",
  "build:cjs": "tsc --outDir build/cjs/ -p tsconfig.cjs.json && node bin/prepare-cjs.js",
  "build:copy-files": "copyfiles --flat package.json ../../README.md ../../LICENSE ../../CHANGELOG.md dist",
  "test": "vitest --config vitest.config.js --run src",
- "test:watch": "vitest watch--config vitest.config.js",
- "test:coverage": "vitest run --coverage",
- "test:coverage:ui": "vitest run --coverage --ui",
+ "test:watch": "vitest watch --config vitest.config.js",
+ "test:coverage": "vitest run --config vitest.config.js --coverage",
+ "test:coverage:ui": "vitest --config vitest.config.js --coverage --ui",
  "dist": "pnpm clean && pnpm lint && pnpm test && pnpm build"
  },
  "dependencies": {

package/test/JsonTestServer.cjs
@@ -0,0 +1,33 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.JsonTestServer = void 0;
+ const msw_1 = require("msw");
+ const node_1 = require("msw/node");
+ class JsonTestServer {
+ constructor(url) {
+ Object.defineProperty(this, "server", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "responseBodyJson", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ const responseBodyJson = () => this.responseBodyJson;
+ this.server = (0, node_1.setupServer)(msw_1.http.post(url, () => msw_1.HttpResponse.json(responseBodyJson())));
+ }
+ setupTestEnvironment() {
+ beforeAll(() => this.server.listen());
+ beforeEach(() => {
+ this.responseBodyJson = {};
+ });
+ afterEach(() => this.server.resetHandlers());
+ afterAll(() => this.server.close());
+ }
+ }
+ exports.JsonTestServer = JsonTestServer;

package/test/JsonTestServer.d.ts
@@ -0,0 +1,7 @@
+ import { SetupServer } from "msw/node";
+ export declare class JsonTestServer {
+ readonly server: SetupServer;
+ responseBodyJson: any;
+ constructor(url: string);
+ setupTestEnvironment(): void;
+ }

package/test/JsonTestServer.js
@@ -0,0 +1,29 @@
+ import { HttpResponse, http } from "msw";
+ import { setupServer } from "msw/node";
+ export class JsonTestServer {
+ constructor(url) {
+ Object.defineProperty(this, "server", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "responseBodyJson", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ const responseBodyJson = () => this.responseBodyJson;
+ this.server = setupServer(http.post(url, () => HttpResponse.json(responseBodyJson())));
+ }
+ setupTestEnvironment() {
+ beforeAll(() => this.server.listen());
+ beforeEach(() => {
+ this.responseBodyJson = {};
+ });
+ afterEach(() => this.server.resetHandlers());
+ afterAll(() => this.server.close());
+ }
+ }
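
JsonTestServer wires up an msw mock server whose POST handler answers with whatever responseBodyJson currently holds. A minimal usage sketch under vitest (the URL and response body below are invented placeholders, not taken from this diff):

import { JsonTestServer } from "./JsonTestServer.js";

// hypothetical endpoint; the real tests in this release target provider API URLs
const server = new JsonTestServer("https://api.example.com/v1/generate");
server.setupTestEnvironment(); // registers beforeAll/beforeEach/afterEach/afterAll hooks

test("returns the mocked JSON body", async () => {
  server.responseBodyJson = { text: "Hello world" }; // assumed response shape
  // ...call the API client or model under test against the mocked URL
  // and assert on the parsed result...
});
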

package/test/StreamingTestServer.cjs
@@ -0,0 +1,55 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.StreamingTestServer = void 0;
+ const msw_1 = require("msw");
+ const node_1 = require("msw/node");
+ class StreamingTestServer {
+ constructor(url) {
+ Object.defineProperty(this, "server", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "responseChunks", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: []
+ });
+ const responseChunks = () => this.responseChunks;
+ this.server = (0, node_1.setupServer)(msw_1.http.post(url, () => {
+ const encoder = new TextEncoder();
+ const stream = new ReadableStream({
+ async start(controller) {
+ try {
+ for (const chunk of responseChunks()) {
+ controller.enqueue(encoder.encode(chunk));
+ }
+ }
+ finally {
+ controller.close();
+ }
+ },
+ });
+ return new msw_1.HttpResponse(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/event-stream",
+ "Cache-Control": "no-cache",
+ Connection: "keep-alive",
+ },
+ });
+ }));
+ }
+ setupTestEnvironment() {
+ beforeAll(() => this.server.listen());
+ beforeEach(() => {
+ this.responseChunks = [];
+ });
+ afterEach(() => this.server.resetHandlers());
+ afterAll(() => this.server.close());
+ }
+ }
+ exports.StreamingTestServer = StreamingTestServer;

package/test/StreamingTestServer.d.ts
@@ -0,0 +1,7 @@
+ import { SetupServer } from "msw/node";
+ export declare class StreamingTestServer {
+ readonly server: SetupServer;
+ responseChunks: any[];
+ constructor(url: string);
+ setupTestEnvironment(): void;
+ }

package/test/StreamingTestServer.js
@@ -0,0 +1,51 @@
+ import { HttpResponse, http } from "msw";
+ import { setupServer } from "msw/node";
+ export class StreamingTestServer {
+ constructor(url) {
+ Object.defineProperty(this, "server", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "responseChunks", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: []
+ });
+ const responseChunks = () => this.responseChunks;
+ this.server = setupServer(http.post(url, () => {
+ const encoder = new TextEncoder();
+ const stream = new ReadableStream({
+ async start(controller) {
+ try {
+ for (const chunk of responseChunks()) {
+ controller.enqueue(encoder.encode(chunk));
+ }
+ }
+ finally {
+ controller.close();
+ }
+ },
+ });
+ return new HttpResponse(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/event-stream",
+ "Cache-Control": "no-cache",
+ Connection: "keep-alive",
+ },
+ });
+ }));
+ }
+ setupTestEnvironment() {
+ beforeAll(() => this.server.listen());
+ beforeEach(() => {
+ this.responseChunks = [];
+ });
+ afterEach(() => this.server.resetHandlers());
+ afterAll(() => this.server.close());
+ }
+ }
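
StreamingTestServer answers POST requests with a text/event-stream response assembled from responseChunks. A sketch of a streaming test in the same style (URL and chunk payloads are invented for illustration):

import { StreamingTestServer } from "./StreamingTestServer.js";

const server = new StreamingTestServer("https://api.example.com/v1/stream");
server.setupTestEnvironment();

test("replays the configured SSE chunks", async () => {
  // each entry is sent verbatim as part of the event stream
  server.responseChunks = [
    `data: {"text":"Hello"}\n\n`,
    `data: {"text":", world!"}\n\n`,
    `data: [DONE]\n\n`,
  ];
  // ...invoke the streaming call under test, collect its parts
  // (for example with arrayFromAsync below), and assert on the joined text...
});
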

package/test/arrayFromAsync.cjs
@@ -0,0 +1,13 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.arrayFromAsync = void 0;
+ // TODO once Array.fromAsync is in Node.js,
+ // use Array.fromAsync instead of this function
+ async function arrayFromAsync(iterable) {
+ const result = [];
+ for await (const item of iterable) {
+ result.push(item);
+ }
+ return result;
+ }
+ exports.arrayFromAsync = arrayFromAsync;

package/test/arrayFromAsync.d.ts
@@ -0,0 +1 @@
+ export declare function arrayFromAsync<T>(iterable: AsyncIterable<T>): Promise<T[]>;

package/test/arrayFromAsync.js
@@ -0,0 +1,9 @@
+ // TODO once Array.fromAsync is in Node.js,
+ // use Array.fromAsync instead of this function
+ export async function arrayFromAsync(iterable) {
+ const result = [];
+ for await (const item of iterable) {
+ result.push(item);
+ }
+ return result;
+ }
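
arrayFromAsync is a small stand-in for the still-missing Array.fromAsync; it simply drains a finite async iterable into an array. For illustration:

import { arrayFromAsync } from "./arrayFromAsync.js";

async function* letters() {
  yield "a";
  yield "b";
}

// resolves to ["a", "b"]
const items = await arrayFromAsync(letters());
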

package/util/streaming/createEventSourceResponseHandler.cjs
@@ -0,0 +1,9 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createEventSourceResponseHandler = void 0;
+ const parseEventSourceStreamAsAsyncIterable_js_1 = require("./parseEventSourceStreamAsAsyncIterable.cjs");
+ const createEventSourceResponseHandler = (schema) => ({ response }) => (0, parseEventSourceStreamAsAsyncIterable_js_1.parseEventSourceStreamAsAsyncIterable)({
+ stream: response.body,
+ schema,
+ });
+ exports.createEventSourceResponseHandler = createEventSourceResponseHandler;

package/util/streaming/createEventSourceResponseHandler.d.ts
@@ -0,0 +1,4 @@
+ import { Schema } from "../../core/schema/Schema.js";
+ export declare const createEventSourceResponseHandler: <T>(schema: Schema<T>) => ({ response }: {
+ response: Response;
+ }) => Promise<AsyncIterable<import("../../index.js").Delta<T>>>;

package/util/streaming/createEventSourceResponseHandler.js
@@ -0,0 +1,5 @@
+ import { parseEventSourceStreamAsAsyncIterable } from "./parseEventSourceStreamAsAsyncIterable.js";
+ export const createEventSourceResponseHandler = (schema) => ({ response }) => parseEventSourceStreamAsAsyncIterable({
+ stream: response.body,
+ schema,
+ });
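
createEventSourceResponseHandler adapts a fetch Response carrying server-sent events into a Promise of AsyncIterable<Delta<T>>, validating each data payload against the given Schema. A sketch with a placeholder schema (the chunk shape below is hypothetical; providers define their own):

import { Schema } from "../../core/schema/Schema.js";
import { createEventSourceResponseHandler } from "./createEventSourceResponseHandler.js";

declare const chunkSchema: Schema<{ text: string }>; // hypothetical chunk shape

async function logEventStream(response: Response) {
  const handler = createEventSourceResponseHandler(chunkSchema);
  for await (const delta of await handler({ response })) {
    // deltas are { type: "delta", deltaValue } or { type: "error", error },
    // as pushed by parseEventSourceStreamAsAsyncIterable further down in this diff
    if (delta.type === "delta") {
      console.log(delta.deltaValue.text);
    }
  }
}
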

package/util/streaming/createJsonStreamResponseHandler.cjs
@@ -0,0 +1,9 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createJsonStreamResponseHandler = void 0;
+ const parseJsonStreamAsAsyncIterable_js_1 = require("./parseJsonStreamAsAsyncIterable.cjs");
+ const createJsonStreamResponseHandler = (schema) => ({ response }) => (0, parseJsonStreamAsAsyncIterable_js_1.parseJsonStreamAsAsyncIterable)({
+ stream: response.body,
+ schema,
+ });
+ exports.createJsonStreamResponseHandler = createJsonStreamResponseHandler;

package/util/streaming/createJsonStreamResponseHandler.d.ts
@@ -0,0 +1,4 @@
+ import { Schema } from "../../core/schema/Schema.js";
+ export declare const createJsonStreamResponseHandler: <T>(schema: Schema<T>) => ({ response }: {
+ response: Response;
+ }) => Promise<AsyncIterable<import("../../index.js").Delta<T>>>;

package/util/streaming/createJsonStreamResponseHandler.js
@@ -0,0 +1,5 @@
+ import { parseJsonStreamAsAsyncIterable } from "./parseJsonStreamAsAsyncIterable.js";
+ export const createJsonStreamResponseHandler = (schema) => ({ response }) => parseJsonStreamAsAsyncIterable({
+ stream: response.body,
+ schema,
+ });

package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs
@@ -0,0 +1,52 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseEventSourceStreamAsAsyncIterable = void 0;
+ const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
+ const AsyncQueue_js_1 = require("../AsyncQueue.cjs");
+ const parseEventSourceStream_js_1 = require("./parseEventSourceStream.cjs");
+ async function parseEventSourceStreamAsAsyncIterable({ stream, schema, }) {
+ const queue = new AsyncQueue_js_1.AsyncQueue();
+ // process the stream asynchonously (no 'await' on purpose):
+ (0, parseEventSourceStream_js_1.parseEventSourceStream)({ stream })
+ .then(async (events) => {
+ try {
+ for await (const event of events) {
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
+ const parseResult = (0, parseJSON_js_1.safeParseJSON)({
+ text: data,
+ schema,
+ });
+ if (!parseResult.success) {
+ queue.push({
+ type: "error",
+ error: parseResult.error,
+ });
+ // Note: the queue is not closed on purpose. Some providers might add additional
+ // chunks that are not parsable, and ModelFusion should be resilient to that.
+ continue;
+ }
+ const completionChunk = parseResult.data;
+ queue.push({
+ type: "delta",
+ deltaValue: completionChunk,
+ });
+ }
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ })
+ .catch((error) => {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ });
+ return queue;
+ }
+ exports.parseEventSourceStreamAsAsyncIterable = parseEventSourceStreamAsAsyncIterable;

package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts
@@ -0,0 +1,6 @@
+ import { Schema } from "../../core/schema/Schema.js";
+ import { Delta } from "../../model-function/Delta.js";
+ export declare function parseEventSourceStreamAsAsyncIterable<T>({ stream, schema, }: {
+ stream: ReadableStream<Uint8Array>;
+ schema: Schema<T>;
+ }): Promise<AsyncIterable<Delta<T>>>;

package/util/streaming/parseEventSourceStreamAsAsyncIterable.js
@@ -0,0 +1,48 @@
+ import { safeParseJSON } from "../../core/schema/parseJSON.js";
+ import { AsyncQueue } from "../AsyncQueue.js";
+ import { parseEventSourceStream } from "./parseEventSourceStream.js";
+ export async function parseEventSourceStreamAsAsyncIterable({ stream, schema, }) {
+ const queue = new AsyncQueue();
+ // process the stream asynchonously (no 'await' on purpose):
+ parseEventSourceStream({ stream })
+ .then(async (events) => {
+ try {
+ for await (const event of events) {
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
+ const parseResult = safeParseJSON({
+ text: data,
+ schema,
+ });
+ if (!parseResult.success) {
+ queue.push({
+ type: "error",
+ error: parseResult.error,
+ });
+ // Note: the queue is not closed on purpose. Some providers might add additional
+ // chunks that are not parsable, and ModelFusion should be resilient to that.
+ continue;
+ }
+ const completionChunk = parseResult.data;
+ queue.push({
+ type: "delta",
+ deltaValue: completionChunk,
+ });
+ }
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ })
+ .catch((error) => {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ });
+ return queue;
+ }

package/util/streaming/parseJsonStreamAsAsyncIterable.cjs
@@ -0,0 +1,21 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseJsonStreamAsAsyncIterable = void 0;
+ const AsyncQueue_js_1 = require("../AsyncQueue.cjs");
+ const parseJsonStream_js_1 = require("./parseJsonStream.cjs");
+ async function parseJsonStreamAsAsyncIterable({ stream, schema, }) {
+ const queue = new AsyncQueue_js_1.AsyncQueue();
+ // process the stream asynchonously (no 'await' on purpose):
+ (0, parseJsonStream_js_1.parseJsonStream)({
+ stream,
+ schema,
+ process(event) {
+ queue.push({ type: "delta", deltaValue: event });
+ },
+ onDone() {
+ queue.close();
+ },
+ });
+ return queue;
+ }
+ exports.parseJsonStreamAsAsyncIterable = parseJsonStreamAsAsyncIterable;

package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts
@@ -0,0 +1,6 @@
+ import { Schema } from "../../core/schema/Schema.js";
+ import { Delta } from "../../model-function/Delta.js";
+ export declare function parseJsonStreamAsAsyncIterable<T>({ stream, schema, }: {
+ stream: ReadableStream<Uint8Array>;
+ schema: Schema<T>;
+ }): Promise<AsyncIterable<Delta<T>>>;

package/util/streaming/parseJsonStreamAsAsyncIterable.js
@@ -0,0 +1,17 @@
+ import { AsyncQueue } from "../AsyncQueue.js";
+ import { parseJsonStream } from "./parseJsonStream.js";
+ export async function parseJsonStreamAsAsyncIterable({ stream, schema, }) {
+ const queue = new AsyncQueue();
+ // process the stream asynchonously (no 'await' on purpose):
+ parseJsonStream({
+ stream,
+ schema,
+ process(event) {
+ queue.push({ type: "delta", deltaValue: event });
+ },
+ onDone() {
+ queue.close();
+ },
+ });
+ return queue;
+ }
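
parseJsonStreamAsAsyncIterable is the counterpart for providers that stream plain JSON chunks rather than server-sent events: it adapts parseJsonStream's callback interface to the same Promise<AsyncIterable<Delta<T>>> shape as the event-source variant above. A sketch with an assumed event shape:

import { Schema } from "../../core/schema/Schema.js";
import { parseJsonStreamAsAsyncIterable } from "./parseJsonStreamAsAsyncIterable.js";

// hypothetical event shape; real providers define their own chunk schema
declare const eventSchema: Schema<{ response: string; done: boolean }>;

async function collectText(stream: ReadableStream<Uint8Array>) {
  const deltas = await parseJsonStreamAsAsyncIterable({ stream, schema: eventSchema });
  let text = "";
  for await (const delta of deltas) {
    if (delta.type === "delta") {
      text += delta.deltaValue.response;
    }
  }
  return text;
}
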