modelfusion 0.104.0 → 0.106.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (292)
  1. package/CHANGELOG.md +60 -0
  2. package/README.md +8 -10
  3. package/core/DefaultRun.cjs +0 -4
  4. package/core/DefaultRun.d.ts +0 -2
  5. package/core/DefaultRun.js +0 -4
  6. package/core/ExtensionFunctionEvent.d.ts +11 -0
  7. package/core/FunctionEvent.d.ts +2 -2
  8. package/extension/index.cjs +22 -3
  9. package/extension/index.d.ts +5 -1
  10. package/extension/index.js +4 -1
  11. package/index.cjs +0 -3
  12. package/index.d.ts +0 -3
  13. package/index.js +0 -3
  14. package/model-function/Delta.d.ts +1 -2
  15. package/model-function/executeStreamCall.cjs +6 -4
  16. package/model-function/executeStreamCall.d.ts +2 -2
  17. package/model-function/executeStreamCall.js +6 -4
  18. package/model-function/generate-speech/streamSpeech.cjs +1 -2
  19. package/model-function/generate-speech/streamSpeech.js +1 -2
  20. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
  21. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
  22. package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
  23. package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
  24. package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
  25. package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
  26. package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
  27. package/model-function/generate-structure/streamStructure.cjs +7 -8
  28. package/model-function/generate-structure/streamStructure.d.ts +1 -1
  29. package/model-function/generate-structure/streamStructure.js +7 -8
  30. package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
  31. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
  32. package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
  33. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
  34. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
  35. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
  36. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
  37. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
  38. package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
  39. package/model-function/generate-text/index.cjs +1 -0
  40. package/model-function/generate-text/index.d.ts +1 -0
  41. package/model-function/generate-text/index.js +1 -0
  42. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
  43. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
  44. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
  45. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
  46. package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
  47. package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
  48. package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
  49. package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
  50. package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
  51. package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
  52. package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
  53. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +8 -5
  54. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
  55. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +6 -3
  56. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
  57. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
  58. package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
  59. package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
  60. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
  61. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
  62. package/model-function/generate-text/prompt-template/index.cjs +1 -1
  63. package/model-function/generate-text/prompt-template/index.d.ts +1 -1
  64. package/model-function/generate-text/prompt-template/index.js +1 -1
  65. package/model-function/generate-text/streamText.cjs +27 -28
  66. package/model-function/generate-text/streamText.d.ts +1 -0
  67. package/model-function/generate-text/streamText.js +27 -28
  68. package/model-function/index.cjs +0 -1
  69. package/model-function/index.d.ts +0 -1
  70. package/model-function/index.js +0 -1
  71. package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
  72. package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
  73. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
  74. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
  75. package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
  76. package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
  77. package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
  78. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
  79. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
  80. package/model-provider/cohere/CohereTextGenerationModel.d.ts +49 -15
  81. package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
  82. package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
  83. package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
  84. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
  85. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
  86. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
  87. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
  88. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
  89. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +171 -20
  90. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
  91. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
  92. package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
  93. package/model-provider/mistral/MistralChatModel.cjs +30 -104
  94. package/model-provider/mistral/MistralChatModel.d.ts +47 -14
  95. package/model-provider/mistral/MistralChatModel.js +30 -104
  96. package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
  97. package/model-provider/mistral/MistralChatModel.test.js +49 -0
  98. package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
  99. package/model-provider/mistral/MistralPromptTemplate.js +9 -2
  100. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
  101. package/model-provider/ollama/OllamaChatModel.cjs +7 -43
  102. package/model-provider/ollama/OllamaChatModel.d.ts +67 -14
  103. package/model-provider/ollama/OllamaChatModel.js +8 -44
  104. package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
  105. package/model-provider/ollama/OllamaChatModel.test.js +25 -0
  106. package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
  107. package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
  108. package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
  109. package/model-provider/ollama/OllamaCompletionModel.d.ts +67 -10
  110. package/model-provider/ollama/OllamaCompletionModel.js +24 -45
  111. package/model-provider/ollama/OllamaCompletionModel.test.cjs +95 -13
  112. package/model-provider/ollama/OllamaCompletionModel.test.js +72 -13
  113. package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
  114. package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
  115. package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
  116. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
  117. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
  118. package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
  119. package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
  120. package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
  121. package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
  122. package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
  123. package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
  124. package/model-provider/openai/OpenAIChatModel.test.js +92 -0
  125. package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
  126. package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
  127. package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
  128. package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
  129. package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
  130. package/model-provider/openai/OpenAICompletionModel.js +33 -85
  131. package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
  132. package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
  133. package/model-provider/openai/OpenAIFacade.cjs +2 -2
  134. package/model-provider/openai/OpenAIFacade.d.ts +3 -3
  135. package/model-provider/openai/OpenAIFacade.js +2 -2
  136. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
  137. package/model-provider/openai/OpenAITranscriptionModel.d.ts +17 -17
  138. package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
  139. package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
  140. package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
  141. package/model-provider/openai/index.cjs +6 -7
  142. package/model-provider/openai/index.d.ts +5 -7
  143. package/model-provider/openai/index.js +5 -6
  144. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
  145. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
  146. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
  147. package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
  148. package/package.json +13 -24
  149. package/test/JsonTestServer.cjs +33 -0
  150. package/test/JsonTestServer.d.ts +7 -0
  151. package/test/JsonTestServer.js +29 -0
  152. package/test/StreamingTestServer.cjs +55 -0
  153. package/test/StreamingTestServer.d.ts +7 -0
  154. package/test/StreamingTestServer.js +51 -0
  155. package/test/arrayFromAsync.cjs +13 -0
  156. package/test/arrayFromAsync.d.ts +1 -0
  157. package/test/arrayFromAsync.js +9 -0
  158. package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
  159. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
  160. package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
  161. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
  162. package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
  163. package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js +1 -0
  164. package/tool/generate-tool-calls-or-text/index.cjs +1 -0
  165. package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
  166. package/tool/generate-tool-calls-or-text/index.js +1 -0
  167. package/util/index.cjs +0 -1
  168. package/util/index.d.ts +0 -1
  169. package/util/index.js +0 -1
  170. package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
  171. package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
  172. package/util/streaming/createEventSourceResponseHandler.js +5 -0
  173. package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
  174. package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
  175. package/util/streaming/createJsonStreamResponseHandler.js +5 -0
  176. package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
  177. package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
  178. package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
  179. package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
  180. package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
  181. package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
  182. package/browser/MediaSourceAppender.cjs +0 -54
  183. package/browser/MediaSourceAppender.d.ts +0 -11
  184. package/browser/MediaSourceAppender.js +0 -50
  185. package/browser/convertAudioChunksToBase64.cjs +0 -8
  186. package/browser/convertAudioChunksToBase64.d.ts +0 -4
  187. package/browser/convertAudioChunksToBase64.js +0 -4
  188. package/browser/convertBlobToBase64.cjs +0 -23
  189. package/browser/convertBlobToBase64.d.ts +0 -1
  190. package/browser/convertBlobToBase64.js +0 -19
  191. package/browser/index.cjs +0 -22
  192. package/browser/index.d.ts +0 -6
  193. package/browser/index.js +0 -6
  194. package/browser/invokeFlow.cjs +0 -23
  195. package/browser/invokeFlow.d.ts +0 -8
  196. package/browser/invokeFlow.js +0 -19
  197. package/browser/readEventSource.cjs +0 -29
  198. package/browser/readEventSource.d.ts +0 -9
  199. package/browser/readEventSource.js +0 -25
  200. package/browser/readEventSourceStream.cjs +0 -35
  201. package/browser/readEventSourceStream.d.ts +0 -7
  202. package/browser/readEventSourceStream.js +0 -31
  203. package/composed-function/index.cjs +0 -19
  204. package/composed-function/index.d.ts +0 -3
  205. package/composed-function/index.js +0 -3
  206. package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
  207. package/composed-function/summarize/summarizeRecursively.cjs +0 -19
  208. package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
  209. package/composed-function/summarize/summarizeRecursively.js +0 -15
  210. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
  211. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
  212. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
  213. package/cost/Cost.cjs +0 -38
  214. package/cost/Cost.d.ts +0 -16
  215. package/cost/Cost.js +0 -34
  216. package/cost/CostCalculator.d.ts +0 -8
  217. package/cost/calculateCost.cjs +0 -28
  218. package/cost/calculateCost.d.ts +0 -7
  219. package/cost/calculateCost.js +0 -24
  220. package/cost/index.cjs +0 -19
  221. package/cost/index.d.ts +0 -3
  222. package/cost/index.js +0 -3
  223. package/guard/GuardEvent.cjs +0 -2
  224. package/guard/GuardEvent.d.ts +0 -7
  225. package/guard/fixStructure.cjs +0 -75
  226. package/guard/fixStructure.d.ts +0 -64
  227. package/guard/fixStructure.js +0 -71
  228. package/guard/guard.cjs +0 -79
  229. package/guard/guard.d.ts +0 -29
  230. package/guard/guard.js +0 -75
  231. package/guard/index.cjs +0 -19
  232. package/guard/index.d.ts +0 -3
  233. package/guard/index.js +0 -3
  234. package/model-function/SuccessfulModelCall.cjs +0 -10
  235. package/model-function/SuccessfulModelCall.d.ts +0 -12
  236. package/model-function/SuccessfulModelCall.js +0 -6
  237. package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
  238. package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
  239. package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
  240. package/model-provider/openai/OpenAICostCalculator.js +0 -85
  241. package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
  242. package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
  243. package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
  244. package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
  245. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
  246. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
  247. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
  248. package/server/fastify/AssetStorage.cjs +0 -2
  249. package/server/fastify/AssetStorage.d.ts +0 -17
  250. package/server/fastify/DefaultFlow.cjs +0 -22
  251. package/server/fastify/DefaultFlow.d.ts +0 -16
  252. package/server/fastify/DefaultFlow.js +0 -18
  253. package/server/fastify/FileSystemAssetStorage.cjs +0 -60
  254. package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
  255. package/server/fastify/FileSystemAssetStorage.js +0 -56
  256. package/server/fastify/FileSystemLogger.cjs +0 -49
  257. package/server/fastify/FileSystemLogger.d.ts +0 -18
  258. package/server/fastify/FileSystemLogger.js +0 -45
  259. package/server/fastify/Flow.cjs +0 -2
  260. package/server/fastify/Flow.d.ts +0 -9
  261. package/server/fastify/FlowRun.cjs +0 -71
  262. package/server/fastify/FlowRun.d.ts +0 -28
  263. package/server/fastify/FlowRun.js +0 -67
  264. package/server/fastify/FlowSchema.cjs +0 -2
  265. package/server/fastify/FlowSchema.d.ts +0 -5
  266. package/server/fastify/Logger.cjs +0 -2
  267. package/server/fastify/Logger.d.ts +0 -13
  268. package/server/fastify/PathProvider.cjs +0 -34
  269. package/server/fastify/PathProvider.d.ts +0 -12
  270. package/server/fastify/PathProvider.js +0 -30
  271. package/server/fastify/index.cjs +0 -24
  272. package/server/fastify/index.d.ts +0 -8
  273. package/server/fastify/index.js +0 -8
  274. package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
  275. package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
  276. package/server/fastify/modelFusionFlowPlugin.js +0 -99
  277. package/util/getAudioFileExtension.cjs +0 -29
  278. package/util/getAudioFileExtension.d.ts +0 -1
  279. package/util/getAudioFileExtension.js +0 -25
  280. /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
  281. /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
  282. /package/{cost/CostCalculator.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
  283. /package/{guard/GuardEvent.js → model-provider/cohere/CohereTextGenerationModel.test.d.ts} +0 -0
  284. /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → llamacpp/LlamaCppTextGenerationModel.test.d.ts} +0 -0
  285. /package/{server/fastify/AssetStorage.js → model-provider/mistral/MistralChatModel.test.d.ts} +0 -0
  286. /package/{server/fastify/Flow.js → model-provider/ollama/OllamaChatModel.test.d.ts} +0 -0
  287. /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
  288. /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
  289. /package/{server/fastify/FlowSchema.js → model-provider/openai/OpenAIChatModel.test.d.ts} +0 -0
  290. /package/{server/fastify/Logger.js → model-provider/openai/OpenAICompletionModel.test.d.ts} +0 -0
  291. /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
  292. /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
package/guard/guard.cjs DELETED
@@ -1,79 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.guard = void 0;
- const executeFunctionCall_js_1 = require("../core/executeFunctionCall.cjs");
- async function guard(execute, input, guards, options) {
- const guardList = Array.isArray(guards) ? guards : [guards];
- const maxAttempts = options?.maxAttempts ?? 2;
- return (0, executeFunctionCall_js_1.executeFunctionCall)({
- options,
- input,
- functionType: "guard",
- execute: async (options) => {
- let attempts = 0;
- while (attempts < maxAttempts) {
- let result;
- try {
- result = {
- type: "value",
- input,
- output: await execute(input, options),
- };
- }
- catch (error) {
- result = {
- type: "error",
- input,
- error,
- };
- }
- let isValid = true;
- for (const guard of guardList) {
- const guardResult = await guard(result);
- if (guardResult === undefined) {
- continue;
- }
- switch (guardResult.action) {
- case "passThrough": {
- break;
- }
- case "retry": {
- input = guardResult.input;
- isValid = false;
- break;
- }
- case "return": {
- result = {
- type: "value",
- input,
- output: guardResult.output,
- };
- break;
- }
- case "throwError": {
- result = {
- type: "error",
- input,
- error: guardResult.error,
- };
- break;
- }
- }
- }
- if (isValid) {
- if (result.type === "value") {
- return result.output;
- }
- else {
- throw result.error;
- }
- }
- attempts++;
- }
- // TODO dedicated error type
- throw new Error(`Maximum attempts of ${maxAttempts} reached ` +
- `without producing a valid output or handling an error.`);
- },
- });
- }
- exports.guard = guard;
package/guard/guard.d.ts DELETED
@@ -1,29 +0,0 @@
- import { FunctionOptions } from "../core/FunctionOptions.js";
- type OutputResult<INPUT, OUTPUT> = {
- type: "value";
- input: INPUT;
- output: OUTPUT;
- error?: undefined;
- } | {
- type: "error";
- input: INPUT;
- output?: undefined;
- error: unknown;
- };
- export type OutputValidator<INPUT, OUTPUT> = ({ type, input, output, error, }: OutputResult<INPUT, OUTPUT>) => PromiseLike<boolean>;
- export type Guard<INPUT, OUTPUT> = ({ type, input, output, error, }: OutputResult<INPUT, OUTPUT>) => PromiseLike<{
- action: "retry";
- input: INPUT;
- } | {
- action: "return";
- output: OUTPUT;
- } | {
- action: "throwError";
- error: unknown;
- } | {
- action: "passThrough";
- } | undefined>;
- export declare function guard<INPUT, OUTPUT>(execute: (input: INPUT, options?: FunctionOptions) => PromiseLike<OUTPUT>, input: INPUT, guards: Guard<INPUT, OUTPUT> | Array<Guard<INPUT, OUTPUT>>, options?: FunctionOptions & {
- maxAttempts: number;
- }): Promise<OUTPUT | undefined>;
- export {};
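The guard module is deleted in this diff without a listed replacement. For reference, a minimal usage sketch of the declared `guard` function, assuming the root `guard` export that existed in 0.104.0; the `generateAnswer` helper and the retry condition are hypothetical illustrations, not code from the package:

import { guard } from "modelfusion";

// Placeholder for a real model call (e.g. generateText with a provider model).
async function generateAnswer(prompt: string): Promise<string> {
  return "Once upon a time, a tiny robot learned to paint.";
}

const answer = await guard(
  (prompt) => generateAnswer(prompt),
  "Write a one-sentence story.",
  async (result) => {
    // Retry with a modified input when the call produced an empty output:
    if (result.type === "value" && result.output.trim() === "") {
      return {
        action: "retry",
        input: "Write a one-sentence story. Do not return an empty answer.",
      };
    }
    return undefined; // undefined lets the result pass through unchanged
  },
  { maxAttempts: 2 }
);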
package/guard/guard.js DELETED
@@ -1,75 +0,0 @@
- import { executeFunctionCall } from "../core/executeFunctionCall.js";
- export async function guard(execute, input, guards, options) {
- const guardList = Array.isArray(guards) ? guards : [guards];
- const maxAttempts = options?.maxAttempts ?? 2;
- return executeFunctionCall({
- options,
- input,
- functionType: "guard",
- execute: async (options) => {
- let attempts = 0;
- while (attempts < maxAttempts) {
- let result;
- try {
- result = {
- type: "value",
- input,
- output: await execute(input, options),
- };
- }
- catch (error) {
- result = {
- type: "error",
- input,
- error,
- };
- }
- let isValid = true;
- for (const guard of guardList) {
- const guardResult = await guard(result);
- if (guardResult === undefined) {
- continue;
- }
- switch (guardResult.action) {
- case "passThrough": {
- break;
- }
- case "retry": {
- input = guardResult.input;
- isValid = false;
- break;
- }
- case "return": {
- result = {
- type: "value",
- input,
- output: guardResult.output,
- };
- break;
- }
- case "throwError": {
- result = {
- type: "error",
- input,
- error: guardResult.error,
- };
- break;
- }
- }
- }
- if (isValid) {
- if (result.type === "value") {
- return result.output;
- }
- else {
- throw result.error;
- }
- }
- attempts++;
- }
- // TODO dedicated error type
- throw new Error(`Maximum attempts of ${maxAttempts} reached ` +
- `without producing a valid output or handling an error.`);
- },
- });
- }
package/guard/index.cjs DELETED
@@ -1,19 +0,0 @@
- "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- __exportStar(require("./GuardEvent.cjs"), exports);
- __exportStar(require("./fixStructure.cjs"), exports);
- __exportStar(require("./guard.cjs"), exports);
package/guard/index.d.ts DELETED
@@ -1,3 +0,0 @@
- export * from "./GuardEvent.js";
- export * from "./fixStructure.js";
- export * from "./guard.js";
package/guard/index.js DELETED
@@ -1,3 +0,0 @@
- export * from "./GuardEvent.js";
- export * from "./fixStructure.js";
- export * from "./guard.js";
package/model-function/SuccessfulModelCall.cjs DELETED
@@ -1,10 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.extractSuccessfulModelCalls = void 0;
- function extractSuccessfulModelCalls(runFunctionEvents) {
- return runFunctionEvents.filter((event) => "model" in event &&
- "result" in event &&
- "status" in event.result &&
- event.result.status === "success");
- }
- exports.extractSuccessfulModelCalls = extractSuccessfulModelCalls;
package/model-function/SuccessfulModelCall.d.ts DELETED
@@ -1,12 +0,0 @@
- import { FunctionEvent } from "../core/FunctionEvent.js";
- import { ModelCallFinishedEvent } from "./ModelCallEvent.js";
- export type SuccessfulModelCall = ModelCallFinishedEvent & {
- result: {
- status: "success";
- };
- };
- export declare function extractSuccessfulModelCalls(runFunctionEvents: FunctionEvent[]): (ModelCallFinishedEvent & {
- result: {
- status: "success";
- };
- })[];
package/model-function/SuccessfulModelCall.js DELETED
@@ -1,6 +0,0 @@
- export function extractSuccessfulModelCalls(runFunctionEvents) {
- return runFunctionEvents.filter((event) => "model" in event &&
- "result" in event &&
- "status" in event.result &&
- event.result.status === "success");
- }
package/model-function/generate-text/prompt-template/Content.d.ts DELETED
@@ -1,25 +0,0 @@
- /**
- * Content can either be a simple text content (`string`) or a
- * complex multi-modal content that is a mix of text parts and
- * image parts.
- */
- export type Content = string | Array<TextPart | ImagePart>;
- export interface TextPart {
- type: "text";
- /**
- * The text content.
- */
- text: string;
- }
- export interface ImagePart {
- type: "image";
- /**
- * Base-64 encoded image.
- */
- base64Image: string;
- /**
- * Optional mime type of the image.
- */
- mimeType?: string;
- }
- export declare function validateContentIsString(content: Content, prompt: unknown): string;
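Per entries 49–51 in the file list, this `Content` type is superseded by `ContentPart.d.ts`. A short sketch of the two shapes the removed declaration allowed; the base64 value is a placeholder and the import points at the module deleted here:

import type { Content } from "./Content.js"; // removed in this release; see ContentPart.d.ts

// Plain text content:
const textOnly: Content = "Describe this image.";

// Multi-modal content mixing text and image parts:
const multiModal: Content = [
  { type: "text", text: "Describe this image." },
  { type: "image", base64Image: "<base64-encoded image data>", mimeType: "image/png" },
];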
package/model-provider/openai/OpenAICostCalculator.cjs DELETED
@@ -1,89 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.OpenAICostCalculator = void 0;
- const OpenAICompletionModel_js_1 = require("./OpenAICompletionModel.cjs");
- const OpenAIImageGenerationModel_js_1 = require("./OpenAIImageGenerationModel.cjs");
- const OpenAISpeechModel_js_1 = require("./OpenAISpeechModel.cjs");
- const OpenAITextEmbeddingModel_js_1 = require("./OpenAITextEmbeddingModel.cjs");
- const OpenAITranscriptionModel_js_1 = require("./OpenAITranscriptionModel.cjs");
- const OpenAIChatModel_js_1 = require("./chat/OpenAIChatModel.cjs");
- class OpenAICostCalculator {
- constructor() {
- Object.defineProperty(this, "provider", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: "openai"
- });
- }
- async calculateCostInMillicents(call) {
- const type = call.functionType;
- const model = call.model.modelName;
- switch (type) {
- case "generate-image": {
- if (model == null) {
- return null;
- }
- return (0, OpenAIImageGenerationModel_js_1.calculateOpenAIImageGenerationCostInMillicents)({
- model: model,
- settings: call.settings,
- });
- }
- case "embed": {
- if (model == null) {
- return null;
- }
- if ((0, OpenAITextEmbeddingModel_js_1.isOpenAIEmbeddingModel)(model)) {
- const responses = Array.isArray(call.result.response)
- ? call.result.response
- : [call.result.response];
- return (0, OpenAITextEmbeddingModel_js_1.calculateOpenAIEmbeddingCostInMillicents)({
- model,
- responses,
- });
- }
- break;
- }
- case "generate-structure":
- case "generate-text": {
- if (model == null) {
- return null;
- }
- if ((0, OpenAIChatModel_js_1.isOpenAIChatModel)(model)) {
- return (0, OpenAIChatModel_js_1.calculateOpenAIChatCostInMillicents)({
- model,
- response: call.result.response,
- });
- }
- if ((0, OpenAICompletionModel_js_1.isOpenAICompletionModel)(model)) {
- return (0, OpenAICompletionModel_js_1.calculateOpenAICompletionCostInMillicents)({
- model,
- response: call.result.response,
- });
- }
- break;
- }
- case "generate-transcription": {
- if (model == null) {
- return null;
- }
- return (0, OpenAITranscriptionModel_js_1.calculateOpenAITranscriptionCostInMillicents)({
- model: model,
- response: call.result
- .response,
- });
- }
- case "generate-speech": {
- if (model == null) {
- return null;
- }
- return (0, OpenAISpeechModel_js_1.calculateOpenAISpeechCostInMillicents)({
- model: model,
- input: call.input,
- });
- }
- }
- return null;
- }
- }
- exports.OpenAICostCalculator = OpenAICostCalculator;
package/model-provider/openai/OpenAICostCalculator.d.ts DELETED
@@ -1,6 +0,0 @@
- import { CostCalculator } from "../../cost/CostCalculator.js";
- import { SuccessfulModelCall } from "../../model-function/SuccessfulModelCall.js";
- export declare class OpenAICostCalculator implements CostCalculator {
- readonly provider = "openai";
- calculateCostInMillicents(call: SuccessfulModelCall): Promise<number | null>;
- }
package/model-provider/openai/OpenAICostCalculator.js DELETED
@@ -1,85 +0,0 @@
- import { calculateOpenAICompletionCostInMillicents, isOpenAICompletionModel, } from "./OpenAICompletionModel.js";
- import { calculateOpenAIImageGenerationCostInMillicents, } from "./OpenAIImageGenerationModel.js";
- import { calculateOpenAISpeechCostInMillicents, } from "./OpenAISpeechModel.js";
- import { calculateOpenAIEmbeddingCostInMillicents, isOpenAIEmbeddingModel, } from "./OpenAITextEmbeddingModel.js";
- import { calculateOpenAITranscriptionCostInMillicents, } from "./OpenAITranscriptionModel.js";
- import { calculateOpenAIChatCostInMillicents, isOpenAIChatModel, } from "./chat/OpenAIChatModel.js";
- export class OpenAICostCalculator {
- constructor() {
- Object.defineProperty(this, "provider", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: "openai"
- });
- }
- async calculateCostInMillicents(call) {
- const type = call.functionType;
- const model = call.model.modelName;
- switch (type) {
- case "generate-image": {
- if (model == null) {
- return null;
- }
- return calculateOpenAIImageGenerationCostInMillicents({
- model: model,
- settings: call.settings,
- });
- }
- case "embed": {
- if (model == null) {
- return null;
- }
- if (isOpenAIEmbeddingModel(model)) {
- const responses = Array.isArray(call.result.response)
- ? call.result.response
- : [call.result.response];
- return calculateOpenAIEmbeddingCostInMillicents({
- model,
- responses,
- });
- }
- break;
- }
- case "generate-structure":
- case "generate-text": {
- if (model == null) {
- return null;
- }
- if (isOpenAIChatModel(model)) {
- return calculateOpenAIChatCostInMillicents({
- model,
- response: call.result.response,
- });
- }
- if (isOpenAICompletionModel(model)) {
- return calculateOpenAICompletionCostInMillicents({
- model,
- response: call.result.response,
- });
- }
- break;
- }
- case "generate-transcription": {
- if (model == null) {
- return null;
- }
- return calculateOpenAITranscriptionCostInMillicents({
- model: model,
- response: call.result
- .response,
- });
- }
- case "generate-speech": {
- if (model == null) {
- return null;
- }
- return calculateOpenAISpeechCostInMillicents({
- model: model,
- input: call.input,
- });
- }
- }
- return null;
- }
- }
package/model-provider/openai/chat/OpenAIChatModel.test.cjs DELETED
@@ -1,61 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- const msw_1 = require("msw");
- const node_1 = require("msw/node");
- const streamText_js_1 = require("../../../model-function/generate-text/streamText.cjs");
- const OpenAIChatModel_js_1 = require("./OpenAIChatModel.cjs");
- const OpenAIApiConfiguration_js_1 = require("../OpenAIApiConfiguration.cjs");
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let responseChunks = [];
- const server = (0, node_1.setupServer)(msw_1.http.post("https://api.openai.com/v1/chat/completions", () => {
- const encoder = new TextEncoder();
- const stream = new ReadableStream({
- async start(controller) {
- try {
- for (const chunk of responseChunks) {
- controller.enqueue(encoder.encode(chunk));
- }
- }
- finally {
- controller.close();
- }
- },
- });
- return new msw_1.HttpResponse(stream, {
- status: 200,
- headers: {
- "Content-Type": "text/event-stream",
- "Cache-Control": "no-cache",
- Connection: "keep-alive",
- },
- });
- }));
- beforeAll(() => server.listen());
- beforeEach(() => {
- responseChunks = [];
- });
- afterEach(() => server.resetHandlers());
- afterAll(() => server.close());
- describe("streamText", () => {
- it("should return only values from the first choice when using streamText", async () => {
- responseChunks = [
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"A"},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":1,"delta":{"role":"assistant","content":""},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":1,"delta":{"content":"B"},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":1,"delta":{},"finish_reason":"stop"}]}\n\n`,
- "data: [DONE]\n\n",
- ];
- const stream = await (0, streamText_js_1.streamText)(new OpenAIChatModel_js_1.OpenAIChatModel({
- api: new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration({ apiKey: "test" }),
- model: "gpt-3.5-turbo",
- numberOfGenerations: 2,
- }).withTextPrompt(), "test prompt");
- const chunks = [];
- for await (const part of stream) {
- chunks.push(part);
- }
- expect(chunks).toStrictEqual(["A"]);
- });
- });
package/model-provider/openai/chat/OpenAIChatModel.test.js DELETED
@@ -1,59 +0,0 @@
- import { HttpResponse, http } from "msw";
- import { setupServer } from "msw/node";
- import { streamText } from "../../../model-function/generate-text/streamText.js";
- import { OpenAIChatModel } from "./OpenAIChatModel.js";
- import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let responseChunks = [];
- const server = setupServer(http.post("https://api.openai.com/v1/chat/completions", () => {
- const encoder = new TextEncoder();
- const stream = new ReadableStream({
- async start(controller) {
- try {
- for (const chunk of responseChunks) {
- controller.enqueue(encoder.encode(chunk));
- }
- }
- finally {
- controller.close();
- }
- },
- });
- return new HttpResponse(stream, {
- status: 200,
- headers: {
- "Content-Type": "text/event-stream",
- "Cache-Control": "no-cache",
- Connection: "keep-alive",
- },
- });
- }));
- beforeAll(() => server.listen());
- beforeEach(() => {
- responseChunks = [];
- });
- afterEach(() => server.resetHandlers());
- afterAll(() => server.close());
- describe("streamText", () => {
- it("should return only values from the first choice when using streamText", async () => {
- responseChunks = [
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"A"},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":1,"delta":{"role":"assistant","content":""},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":1,"delta":{"content":"B"},"finish_reason":null}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}\n\n`,
- `data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1702657020,"model":"gpt-3.5-turbo-0613","system_fingerprint":null,"choices":[{"index":1,"delta":{},"finish_reason":"stop"}]}\n\n`,
- "data: [DONE]\n\n",
- ];
- const stream = await streamText(new OpenAIChatModel({
- api: new OpenAIApiConfiguration({ apiKey: "test" }),
- model: "gpt-3.5-turbo",
- numberOfGenerations: 2,
- }).withTextPrompt(), "test prompt");
- const chunks = [];
- for await (const part of stream) {
- chunks.push(part);
- }
- expect(chunks).toStrictEqual(["A"]);
- });
- });
package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs DELETED
@@ -1,70 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.chat = exports.instruction = exports.text = exports.identity = void 0;
- const OpenAIChatMessage_js_1 = require("./OpenAIChatMessage.cjs");
- /**
- * OpenAIMessage[] identity chat format.
- */
- function identity() {
- return { format: (prompt) => prompt, stopSequences: [] };
- }
- exports.identity = identity;
- /**
- * Formats a text prompt as an OpenAI chat prompt.
- */
- function text() {
- return {
- format: (prompt) => [OpenAIChatMessage_js_1.OpenAIChatMessage.user(prompt)],
- stopSequences: [],
- };
- }
- exports.text = text;
- /**
- * Formats an instruction prompt as an OpenAI chat prompt.
- */
- function instruction() {
- return {
- format(prompt) {
- const messages = [];
- if (prompt.system != null) {
- messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.system(prompt.system));
- }
- messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.user(prompt.instruction));
- return messages;
- },
- stopSequences: [],
- };
- }
- exports.instruction = instruction;
- /**
- * Formats a chat prompt as an OpenAI chat prompt.
- */
- function chat() {
- return {
- format(prompt) {
- const messages = [];
- if (prompt.system != null) {
- messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.system(prompt.system));
- }
- for (const { role, content } of prompt.messages) {
- switch (role) {
- case "user": {
- messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.user(content));
- break;
- }
- case "assistant": {
- messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.assistant(content));
- break;
- }
- default: {
- const _exhaustiveCheck = role;
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
- }
- }
- }
- return messages;
- },
- stopSequences: [],
- };
- }
- exports.chat = chat;
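These prompt-template helpers are not dropped: per entries 125–127 in the file list, equivalent files now live directly under model-provider/openai/. As a rough sketch of what the `chat()` template shown above does when formatting a generic chat prompt — the import path reflects the new location, and the resulting message objects built by OpenAIChatMessage are shown only approximately:

import { chat } from "./OpenAIChatPromptTemplate.js";

const template = chat();

// Maps a generic chat prompt (system + user/assistant messages) to OpenAI chat messages:
const messages = template.format({
  system: "You are a concise assistant.",
  messages: [
    { role: "user", content: "What does a prompt template do?" },
    { role: "assistant", content: "It maps a generic prompt shape to a provider-specific one." },
  ],
});

// messages is roughly:
// [
//   { role: "system", content: "You are a concise assistant." },
//   { role: "user", content: "What does a prompt template do?" },
//   { role: "assistant", content: "It maps a generic prompt shape to a provider-specific one." },
// ]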