modelfusion 0.109.0 → 0.111.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (230)
  1. package/CHANGELOG.md +25 -0
  2. package/core/api/ApiFacade.cjs +20 -0
  3. package/core/api/ApiFacade.d.ts +4 -0
  4. package/core/api/ApiFacade.js +4 -0
  5. package/core/api/BaseUrlApiConfiguration.cjs +47 -5
  6. package/core/api/BaseUrlApiConfiguration.d.ts +23 -6
  7. package/core/api/BaseUrlApiConfiguration.js +45 -4
  8. package/core/api/BaseUrlApiConfiguration.test.cjs +11 -0
  9. package/core/api/BaseUrlApiConfiguration.test.d.ts +1 -0
  10. package/core/api/BaseUrlApiConfiguration.test.js +9 -0
  11. package/core/api/callWithRetryAndThrottle.cjs +3 -3
  12. package/core/api/callWithRetryAndThrottle.js +3 -3
  13. package/core/api/index.cjs +15 -2
  14. package/core/api/index.d.ts +2 -2
  15. package/core/api/index.js +2 -2
  16. package/core/api/postToApi.cjs +28 -5
  17. package/core/api/postToApi.d.ts +5 -4
  18. package/core/api/postToApi.js +26 -4
  19. package/core/api/throttleOff.cjs +8 -0
  20. package/core/api/throttleOff.d.ts +5 -0
  21. package/core/api/throttleOff.js +4 -0
  22. package/{extension → internal}/index.cjs +2 -7
  23. package/{extension → internal}/index.d.ts +1 -1
  24. package/{extension → internal}/index.js +1 -1
  25. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  26. package/model-provider/anthropic/AnthropicApiConfiguration.cjs +14 -6
  27. package/model-provider/anthropic/AnthropicApiConfiguration.d.ts +7 -8
  28. package/model-provider/anthropic/AnthropicApiConfiguration.js +15 -7
  29. package/model-provider/anthropic/AnthropicError.cjs +7 -27
  30. package/model-provider/anthropic/AnthropicError.d.ts +21 -16
  31. package/model-provider/anthropic/AnthropicError.js +7 -25
  32. package/model-provider/anthropic/AnthropicFacade.cjs +10 -1
  33. package/model-provider/anthropic/AnthropicFacade.d.ts +9 -0
  34. package/model-provider/anthropic/AnthropicFacade.js +8 -0
  35. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +22 -24
  36. package/model-provider/anthropic/AnthropicTextGenerationModel.js +23 -25
  37. package/model-provider/anthropic/index.cjs +1 -4
  38. package/model-provider/anthropic/index.d.ts +1 -1
  39. package/model-provider/anthropic/index.js +0 -1
  40. package/model-provider/automatic1111/Automatic1111ApiConfiguration.cjs +12 -4
  41. package/model-provider/automatic1111/Automatic1111ApiConfiguration.d.ts +3 -3
  42. package/model-provider/automatic1111/Automatic1111ApiConfiguration.js +12 -4
  43. package/model-provider/automatic1111/Automatic1111Error.cjs +3 -3
  44. package/model-provider/automatic1111/Automatic1111Error.d.ts +13 -3
  45. package/model-provider/automatic1111/Automatic1111Error.js +4 -4
  46. package/model-provider/automatic1111/Automatic1111Facade.cjs +9 -9
  47. package/model-provider/automatic1111/Automatic1111Facade.d.ts +6 -6
  48. package/model-provider/automatic1111/Automatic1111Facade.js +7 -7
  49. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +11 -7
  50. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +11 -7
  51. package/model-provider/cohere/CohereApiConfiguration.cjs +14 -6
  52. package/model-provider/cohere/CohereApiConfiguration.d.ts +7 -8
  53. package/model-provider/cohere/CohereApiConfiguration.js +15 -7
  54. package/model-provider/cohere/CohereError.cjs +8 -43
  55. package/model-provider/cohere/CohereError.d.ts +9 -16
  56. package/model-provider/cohere/CohereError.js +8 -41
  57. package/model-provider/cohere/CohereFacade.cjs +12 -3
  58. package/model-provider/cohere/CohereFacade.d.ts +11 -2
  59. package/model-provider/cohere/CohereFacade.js +10 -2
  60. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +18 -22
  61. package/model-provider/cohere/CohereTextEmbeddingModel.js +18 -22
  62. package/model-provider/cohere/CohereTextGenerationModel.cjs +31 -39
  63. package/model-provider/cohere/CohereTextGenerationModel.d.ts +110 -8
  64. package/model-provider/cohere/CohereTextGenerationModel.js +31 -39
  65. package/model-provider/cohere/CohereTokenizer.cjs +32 -41
  66. package/model-provider/cohere/CohereTokenizer.d.ts +2 -2
  67. package/model-provider/cohere/CohereTokenizer.js +32 -41
  68. package/model-provider/cohere/index.cjs +1 -3
  69. package/model-provider/cohere/index.d.ts +1 -1
  70. package/model-provider/cohere/index.js +0 -1
  71. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +14 -6
  72. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.d.ts +7 -8
  73. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +15 -7
  74. package/model-provider/elevenlabs/ElevenLabsFacade.cjs +10 -1
  75. package/model-provider/elevenlabs/ElevenLabsFacade.d.ts +9 -0
  76. package/model-provider/elevenlabs/ElevenLabsFacade.js +8 -0
  77. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +42 -53
  78. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +44 -55
  79. package/model-provider/huggingface/HuggingFaceApiConfiguration.cjs +14 -6
  80. package/model-provider/huggingface/HuggingFaceApiConfiguration.d.ts +7 -8
  81. package/model-provider/huggingface/HuggingFaceApiConfiguration.js +15 -7
  82. package/model-provider/huggingface/HuggingFaceError.cjs +7 -29
  83. package/model-provider/huggingface/HuggingFaceError.d.ts +9 -16
  84. package/model-provider/huggingface/HuggingFaceError.js +7 -27
  85. package/model-provider/huggingface/HuggingFaceFacade.cjs +10 -1
  86. package/model-provider/huggingface/HuggingFaceFacade.d.ts +9 -0
  87. package/model-provider/huggingface/HuggingFaceFacade.js +8 -0
  88. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +17 -27
  89. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +17 -27
  90. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +22 -23
  91. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +22 -23
  92. package/model-provider/huggingface/index.cjs +1 -3
  93. package/model-provider/huggingface/index.d.ts +1 -1
  94. package/model-provider/huggingface/index.js +0 -1
  95. package/model-provider/llamacpp/LlamaCppApiConfiguration.cjs +13 -6
  96. package/model-provider/llamacpp/LlamaCppApiConfiguration.d.ts +7 -9
  97. package/model-provider/llamacpp/LlamaCppApiConfiguration.js +14 -7
  98. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +4 -4
  99. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +173 -5
  100. package/model-provider/llamacpp/LlamaCppCompletionModel.js +4 -4
  101. package/model-provider/llamacpp/LlamaCppError.cjs +7 -27
  102. package/model-provider/llamacpp/LlamaCppError.d.ts +9 -16
  103. package/model-provider/llamacpp/LlamaCppError.js +7 -25
  104. package/model-provider/llamacpp/LlamaCppFacade.cjs +10 -2
  105. package/model-provider/llamacpp/LlamaCppFacade.d.ts +8 -1
  106. package/model-provider/llamacpp/LlamaCppFacade.js +8 -1
  107. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +10 -14
  108. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +10 -14
  109. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +14 -18
  110. package/model-provider/llamacpp/LlamaCppTokenizer.js +14 -18
  111. package/model-provider/llamacpp/index.cjs +1 -3
  112. package/model-provider/llamacpp/index.d.ts +1 -1
  113. package/model-provider/llamacpp/index.js +0 -1
  114. package/model-provider/lmnt/LmntApiConfiguration.cjs +14 -6
  115. package/model-provider/lmnt/LmntApiConfiguration.d.ts +7 -8
  116. package/model-provider/lmnt/LmntApiConfiguration.js +15 -7
  117. package/model-provider/lmnt/LmntFacade.cjs +11 -2
  118. package/model-provider/lmnt/LmntFacade.d.ts +10 -1
  119. package/model-provider/lmnt/LmntFacade.js +9 -1
  120. package/model-provider/lmnt/LmntSpeechModel.cjs +53 -41
  121. package/model-provider/lmnt/LmntSpeechModel.d.ts +51 -3
  122. package/model-provider/lmnt/LmntSpeechModel.js +54 -42
  123. package/model-provider/mistral/MistralApiConfiguration.cjs +14 -6
  124. package/model-provider/mistral/MistralApiConfiguration.d.ts +9 -11
  125. package/model-provider/mistral/MistralApiConfiguration.js +15 -7
  126. package/model-provider/mistral/MistralChatModel.cjs +4 -4
  127. package/model-provider/mistral/MistralChatModel.d.ts +48 -3
  128. package/model-provider/mistral/MistralChatModel.js +5 -5
  129. package/model-provider/mistral/MistralError.cjs +3 -3
  130. package/model-provider/mistral/MistralError.d.ts +15 -3
  131. package/model-provider/mistral/MistralError.js +4 -4
  132. package/model-provider/mistral/MistralFacade.cjs +5 -1
  133. package/model-provider/mistral/MistralFacade.d.ts +10 -3
  134. package/model-provider/mistral/MistralFacade.js +6 -2
  135. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +2 -1
  136. package/model-provider/mistral/MistralTextEmbeddingModel.js +2 -1
  137. package/model-provider/ollama/OllamaApiConfiguration.cjs +13 -6
  138. package/model-provider/ollama/OllamaApiConfiguration.d.ts +7 -10
  139. package/model-provider/ollama/OllamaApiConfiguration.js +14 -7
  140. package/model-provider/ollama/OllamaChatModel.cjs +4 -4
  141. package/model-provider/ollama/OllamaChatModel.d.ts +46 -5
  142. package/model-provider/ollama/OllamaChatModel.js +5 -5
  143. package/model-provider/ollama/OllamaCompletionModel.cjs +4 -4
  144. package/model-provider/ollama/OllamaCompletionModel.d.ts +40 -5
  145. package/model-provider/ollama/OllamaCompletionModel.js +5 -5
  146. package/model-provider/ollama/OllamaError.cjs +3 -3
  147. package/model-provider/ollama/OllamaError.d.ts +7 -3
  148. package/model-provider/ollama/OllamaError.js +4 -4
  149. package/model-provider/ollama/OllamaFacade.cjs +6 -2
  150. package/model-provider/ollama/OllamaFacade.d.ts +8 -3
  151. package/model-provider/ollama/OllamaFacade.js +6 -2
  152. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +15 -16
  153. package/model-provider/ollama/OllamaTextEmbeddingModel.js +15 -16
  154. package/model-provider/openai/AbstractOpenAIChatModel.cjs +4 -4
  155. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +148 -2
  156. package/model-provider/openai/AbstractOpenAIChatModel.js +4 -4
  157. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +30 -32
  158. package/model-provider/openai/AbstractOpenAICompletionModel.js +30 -32
  159. package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +9 -8
  160. package/model-provider/openai/OpenAIApiConfiguration.cjs +14 -6
  161. package/model-provider/openai/OpenAIApiConfiguration.d.ts +7 -8
  162. package/model-provider/openai/OpenAIApiConfiguration.js +15 -7
  163. package/model-provider/openai/OpenAICompletionModel.cjs +3 -91
  164. package/model-provider/openai/OpenAICompletionModel.d.ts +3 -71
  165. package/model-provider/openai/OpenAICompletionModel.js +3 -91
  166. package/model-provider/openai/OpenAIError.cjs +8 -8
  167. package/model-provider/openai/OpenAIError.d.ts +27 -3
  168. package/model-provider/openai/OpenAIError.js +9 -9
  169. package/model-provider/openai/OpenAIFacade.cjs +23 -2
  170. package/model-provider/openai/OpenAIFacade.d.ts +20 -2
  171. package/model-provider/openai/OpenAIFacade.js +20 -1
  172. package/model-provider/openai/OpenAIImageGenerationModel.cjs +20 -21
  173. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
  174. package/model-provider/openai/OpenAIImageGenerationModel.js +20 -21
  175. package/model-provider/openai/OpenAISpeechModel.cjs +17 -22
  176. package/model-provider/openai/OpenAISpeechModel.js +17 -22
  177. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +18 -23
  178. package/model-provider/openai/OpenAITextEmbeddingModel.js +18 -23
  179. package/model-provider/openai/OpenAITranscriptionModel.cjs +42 -48
  180. package/model-provider/openai/OpenAITranscriptionModel.d.ts +14 -10
  181. package/model-provider/openai/OpenAITranscriptionModel.js +42 -48
  182. package/model-provider/openai/TikTokenTokenizer.cjs +0 -18
  183. package/model-provider/openai/TikTokenTokenizer.d.ts +3 -3
  184. package/model-provider/openai/TikTokenTokenizer.js +0 -18
  185. package/model-provider/openai-compatible/FireworksAIApiConfiguration.cjs +11 -7
  186. package/model-provider/openai-compatible/FireworksAIApiConfiguration.d.ts +4 -9
  187. package/model-provider/openai-compatible/FireworksAIApiConfiguration.js +12 -8
  188. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +25 -1
  189. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +23 -0
  190. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +22 -0
  191. package/model-provider/openai-compatible/TogetherAIApiConfiguration.cjs +11 -7
  192. package/model-provider/openai-compatible/TogetherAIApiConfiguration.d.ts +4 -9
  193. package/model-provider/openai-compatible/TogetherAIApiConfiguration.js +12 -8
  194. package/model-provider/stability/StabilityApiConfiguration.cjs +13 -12
  195. package/model-provider/stability/StabilityApiConfiguration.d.ts +4 -4
  196. package/model-provider/stability/StabilityApiConfiguration.js +13 -12
  197. package/model-provider/stability/StabilityError.cjs +3 -3
  198. package/model-provider/stability/StabilityError.d.ts +7 -3
  199. package/model-provider/stability/StabilityError.js +4 -4
  200. package/model-provider/stability/StabilityFacade.cjs +9 -9
  201. package/model-provider/stability/StabilityFacade.d.ts +8 -8
  202. package/model-provider/stability/StabilityFacade.js +7 -7
  203. package/model-provider/stability/StabilityImageGenerationModel.cjs +2 -1
  204. package/model-provider/stability/StabilityImageGenerationModel.js +2 -1
  205. package/model-provider/whispercpp/WhisperCppApiConfiguration.cjs +13 -6
  206. package/model-provider/whispercpp/WhisperCppApiConfiguration.d.ts +7 -10
  207. package/model-provider/whispercpp/WhisperCppApiConfiguration.js +14 -7
  208. package/model-provider/whispercpp/WhisperCppFacade.cjs +9 -5
  209. package/model-provider/whispercpp/WhisperCppFacade.d.ts +7 -2
  210. package/model-provider/whispercpp/WhisperCppFacade.js +8 -4
  211. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +5 -2
  212. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +6 -3
  213. package/package.json +5 -5
  214. package/tool/WebSearchTool.cjs +2 -2
  215. package/tool/WebSearchTool.d.ts +1 -2
  216. package/tool/WebSearchTool.js +3 -3
  217. package/vector-index/memory/MemoryVectorIndex.cjs +2 -2
  218. package/vector-index/memory/MemoryVectorIndex.js +3 -3
  219. package/core/api/BaseUrlPartsApiConfiguration.cjs +0 -53
  220. package/core/api/BaseUrlPartsApiConfiguration.d.ts +0 -26
  221. package/core/api/BaseUrlPartsApiConfiguration.js +0 -49
  222. package/core/api/throttleUnlimitedConcurrency.cjs +0 -8
  223. package/core/api/throttleUnlimitedConcurrency.d.ts +0 -5
  224. package/core/api/throttleUnlimitedConcurrency.js +0 -4
  225. package/model-provider/elevenlabs/ElevenLabsError.cjs +0 -30
  226. package/model-provider/elevenlabs/ElevenLabsError.d.ts +0 -3
  227. package/model-provider/elevenlabs/ElevenLabsError.js +0 -26
  228. package/model-provider/lmnt/LmntError.cjs +0 -30
  229. package/model-provider/lmnt/LmntError.d.ts +0 -3
  230. package/model-provider/lmnt/LmntError.js +0 -26
@@ -1,6 +1,7 @@
  import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+ import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -85,28 +86,26 @@ export class OpenAIImageGenerationModel extends AbstractModel {
  }
  async callAPI(prompt, options) {
  const api = this.settings.api ?? new OpenAIApiConfiguration();
- const abortSignal = options?.run?.abortSignal;
- const userId = options?.run?.userId;
- const responseFormat = options?.responseFormat;
+ const abortSignal = options.run?.abortSignal;
+ const userId = options.run?.userId;
+ const responseFormat = options.responseFormat;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
- call: async () => {
- return postJsonToApi({
- url: api.assembleUrl("/images/generations"),
- headers: api.headers,
- body: {
- prompt,
- n: this.settings.numberOfGenerations,
- size: this.settings.size,
- response_format: responseFormat.type,
- user: this.settings.isUserIdForwardingEnabled ? userId : undefined,
- },
- failedResponseHandler: failedOpenAICallResponseHandler,
- successfulResponseHandler: responseFormat?.handler,
- abortSignal,
- });
- },
+ call: async () => postJsonToApi({
+ url: api.assembleUrl("/images/generations"),
+ headers: api.headers,
+ body: {
+ prompt,
+ n: this.settings.numberOfGenerations,
+ size: this.settings.size,
+ response_format: responseFormat.type,
+ user: this.settings.isUserIdForwardingEnabled ? userId : undefined,
+ },
+ failedResponseHandler: failedOpenAICallResponseHandler,
+ successfulResponseHandler: responseFormat.handler,
+ abortSignal,
+ }),
  });
  }
  get settingsForEvent() {
@@ -153,10 +152,10 @@ const openAIImageGenerationBase64JsonSchema = z.object({
  export const OpenAIImageGenerationResponseFormat = {
  url: {
  type: "url",
- handler: createJsonResponseHandler(openAIImageGenerationUrlSchema),
+ handler: createJsonResponseHandler(zodSchema(openAIImageGenerationUrlSchema)),
  },
  base64Json: {
  type: "b64_json",
- handler: createJsonResponseHandler(openAIImageGenerationBase64JsonSchema),
+ handler: createJsonResponseHandler(zodSchema(openAIImageGenerationBase64JsonSchema)),
  },
  };
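Note: the recurring change across these hunks (and the embedding and transcription hunks further down) is that raw Zod schemas are no longer passed to createJsonResponseHandler directly; they are first wrapped with zodSchema(...) from core/schema/ZodSchema.js. A minimal before/after sketch of that call site, mirroring the diff above — the schema name is illustrative and not taken from the package:

import { z } from "zod";
import { createJsonResponseHandler } from "../../core/api/postToApi.js";
import { zodSchema } from "../../core/schema/ZodSchema.js";

// Illustrative response schema; the real schemas live next to each model.
const exampleResponseSchema = z.object({ created: z.number() });

// Before this change: the raw Zod schema was passed directly.
// const handler = createJsonResponseHandler(exampleResponseSchema);

// After this change: the schema is wrapped in the library's Schema abstraction first.
const handler = createJsonResponseHandler(zodSchema(exampleResponseSchema));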
@@ -46,13 +46,24 @@ class OpenAISpeechModel extends AbstractModel_js_1.AbstractModel {
  return this.settings.model;
  }
  async callAPI(text, options) {
+ const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOpenAITextToSpeechAPI({
- ...this.settings,
- abortSignal: options?.run?.abortSignal,
- text,
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => (0, postToApi_js_1.postJsonToApi)({
+ url: api.assembleUrl(`/audio/speech`),
+ headers: api.headers,
+ body: {
+ input: text,
+ voice: this.settings.voice,
+ speed: this.settings.speed,
+ model: this.settings.model,
+ response_format: this.settings.responseFormat,
+ },
+ failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+ successfulResponseHandler: (0, postToApi_js_1.createAudioMpegResponseHandler)(),
+ abortSignal,
  }),
  });
  }
@@ -75,19 +86,3 @@ class OpenAISpeechModel extends AbstractModel_js_1.AbstractModel {
  }
  }
  exports.OpenAISpeechModel = OpenAISpeechModel;
- async function callOpenAITextToSpeechAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, text, voice, model, speed, responseFormat, }) {
- return (0, postToApi_js_1.postJsonToApi)({
- url: api.assembleUrl(`/audio/speech`),
- headers: api.headers,
- body: {
- input: text,
- voice,
- speed,
- model,
- response_format: responseFormat,
- },
- failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
- successfulResponseHandler: (0, postToApi_js_1.createAudioMpegResponseHandler)(),
- abortSignal,
- });
- }
@@ -42,13 +42,24 @@ export class OpenAISpeechModel extends AbstractModel {
  return this.settings.model;
  }
  async callAPI(text, options) {
+ const api = this.settings.api ?? new OpenAIApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return callWithRetryAndThrottle({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOpenAITextToSpeechAPI({
- ...this.settings,
- abortSignal: options?.run?.abortSignal,
- text,
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => postJsonToApi({
+ url: api.assembleUrl(`/audio/speech`),
+ headers: api.headers,
+ body: {
+ input: text,
+ voice: this.settings.voice,
+ speed: this.settings.speed,
+ model: this.settings.model,
+ response_format: this.settings.responseFormat,
+ },
+ failedResponseHandler: failedOpenAICallResponseHandler,
+ successfulResponseHandler: createAudioMpegResponseHandler(),
+ abortSignal,
  }),
  });
  }
@@ -70,19 +81,3 @@ export class OpenAISpeechModel extends AbstractModel {
  });
  }
  }
- async function callOpenAITextToSpeechAPI({ api = new OpenAIApiConfiguration(), abortSignal, text, voice, model, speed, responseFormat, }) {
- return postJsonToApi({
- url: api.assembleUrl(`/audio/speech`),
- headers: api.headers,
- body: {
- input: text,
- voice,
- speed,
- model,
- response_format: responseFormat,
- },
- failedResponseHandler: failedOpenAICallResponseHandler,
- successfulResponseHandler: createAudioMpegResponseHandler(),
- abortSignal,
- });
- }
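Note: both OpenAISpeechModel builds drop the standalone callOpenAITextToSpeechAPI helper and inline the postJsonToApi call into callAPI, resolving the configuration once via this.settings.api ?? new OpenAIApiConfiguration(). A hedged usage sketch of what that fallback means for callers — the apiKey, model, and voice values are placeholders, and the OpenAIApiConfiguration options object is an assumption about its constructor:

import { OpenAISpeechModel } from "./OpenAISpeechModel.js";
import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";

// An explicit configuration in settings takes precedence over the default.
const customSpeechModel = new OpenAISpeechModel({
  api: new OpenAIApiConfiguration({ apiKey: "sk-..." }), // assumed option; placeholder key
  model: "tts-1",
  voice: "alloy",
});

// Without `api`, callAPI now falls back to `new OpenAIApiConfiguration()`.
const defaultSpeechModel = new OpenAISpeechModel({ model: "tts-1", voice: "alloy" });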
@@ -4,6 +4,7 @@ exports.OpenAITextEmbeddingModel = exports.calculateOpenAIEmbeddingCostInMillice
  const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+ const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
  const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
  const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
@@ -91,16 +92,24 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
  return (0, countTokens_js_1.countTokens)(this.tokenizer, input);
  }
  async callAPI(texts, options) {
+ const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOpenAITextEmbeddingAPI({
- ...this.settings,
- user: this.settings.isUserIdForwardingEnabled
- ? options?.run?.userId
- : undefined,
- abortSignal: options?.run?.abortSignal,
- input: texts,
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => (0, postToApi_js_1.postJsonToApi)({
+ url: api.assembleUrl("/embeddings"),
+ headers: api.headers,
+ body: {
+ model: this.modelName,
+ input: texts,
+ user: this.settings.isUserIdForwardingEnabled
+ ? options?.run?.userId
+ : undefined,
+ },
+ failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+ successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITextEmbeddingResponseSchema)),
+ abortSignal,
  }),
  });
  }
@@ -135,17 +144,3 @@ const openAITextEmbeddingResponseSchema = zod_1.z.object({
  total_tokens: zod_1.z.number(),
  }),
  });
- async function callOpenAITextEmbeddingAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, model, input, user, }) {
- return (0, postToApi_js_1.postJsonToApi)({
- url: api.assembleUrl("/embeddings"),
- headers: api.headers,
- body: {
- model,
- input,
- user,
- },
- failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
- successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(openAITextEmbeddingResponseSchema),
- abortSignal,
- });
- }
@@ -1,6 +1,7 @@
  import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+ import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -86,16 +87,24 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
  return countTokens(this.tokenizer, input);
  }
  async callAPI(texts, options) {
+ const api = this.settings.api ?? new OpenAIApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return callWithRetryAndThrottle({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOpenAITextEmbeddingAPI({
- ...this.settings,
- user: this.settings.isUserIdForwardingEnabled
- ? options?.run?.userId
- : undefined,
- abortSignal: options?.run?.abortSignal,
- input: texts,
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => postJsonToApi({
+ url: api.assembleUrl("/embeddings"),
+ headers: api.headers,
+ body: {
+ model: this.modelName,
+ input: texts,
+ user: this.settings.isUserIdForwardingEnabled
+ ? options?.run?.userId
+ : undefined,
+ },
+ failedResponseHandler: failedOpenAICallResponseHandler,
+ successfulResponseHandler: createJsonResponseHandler(zodSchema(openAITextEmbeddingResponseSchema)),
+ abortSignal,
  }),
  });
  }
@@ -129,17 +138,3 @@ const openAITextEmbeddingResponseSchema = z.object({
  total_tokens: z.number(),
  }),
  });
- async function callOpenAITextEmbeddingAPI({ api = new OpenAIApiConfiguration(), abortSignal, model, input, user, }) {
- return postJsonToApi({
- url: api.assembleUrl("/embeddings"),
- headers: api.headers,
- body: {
- model,
- input,
- user,
- },
- failedResponseHandler: failedOpenAICallResponseHandler,
- successfulResponseHandler: createJsonResponseHandler(openAITextEmbeddingResponseSchema),
- abortSignal,
- });
- }
@@ -4,6 +4,7 @@ exports.OpenAITranscriptionResponseFormat = exports.OpenAITranscriptionModel = e
  const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+ const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
  const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
  const OpenAIError_js_1 = require("./OpenAIError.cjs");
@@ -65,19 +66,46 @@ class OpenAITranscriptionModel extends AbstractModel_js_1.AbstractModel {
  };
  }
  async callAPI(data, options) {
+ const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOpenAITranscriptionAPI({
- ...this.settings,
- // other settings:
- abortSignal: options?.run?.abortSignal,
- file: {
- name: `audio.${data.type}`,
- data: data.data,
- },
- responseFormat: options?.responseFormat,
- }),
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => {
+ const fileName = `audio.${data.type}`;
+ const formData = new FormData();
+ formData.append("file", new Blob([data.data]), fileName);
+ formData.append("model", this.settings.model);
+ if (this.settings.prompt != null) {
+ formData.append("prompt", this.settings.prompt);
+ }
+ if (options.responseFormat != null) {
+ formData.append("response_format", options.responseFormat.type);
+ }
+ if (this.settings.temperature != null) {
+ formData.append("temperature", this.settings.temperature.toString());
+ }
+ if (this.settings.language != null) {
+ formData.append("language", this.settings.language);
+ }
+ return (0, postToApi_js_1.postToApi)({
+ url: api.assembleUrl("/audio/transcriptions"),
+ headers: api.headers,
+ body: {
+ content: formData,
+ values: {
+ model: this.settings.model,
+ prompt: this.settings.prompt,
+ response_format: options.responseFormat,
+ temperature: this.settings.temperature,
+ language: this.settings.language,
+ },
+ },
+ failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+ successfulResponseHandler: options.responseFormat.handler,
+ abortSignal,
+ });
+ },
  });
  }
  get settingsForEvent() {
@@ -91,40 +119,6 @@ class OpenAITranscriptionModel extends AbstractModel_js_1.AbstractModel {
  }
  }
  exports.OpenAITranscriptionModel = OpenAITranscriptionModel;
- async function callOpenAITranscriptionAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, model, file, prompt, responseFormat, temperature, language, }) {
- const formData = new FormData();
- formData.append("file", new Blob([file.data]), file.name);
- formData.append("model", model);
- if (prompt) {
- formData.append("prompt", prompt);
- }
- if (responseFormat) {
- formData.append("response_format", responseFormat.type);
- }
- if (temperature) {
- formData.append("temperature", temperature.toString());
- }
- if (language) {
- formData.append("language", language);
- }
- return (0, postToApi_js_1.postToApi)({
- url: api.assembleUrl("/audio/transcriptions"),
- headers: api.headers,
- body: {
- content: formData,
- values: {
- model,
- prompt,
- response_format: responseFormat,
- temperature,
- language,
- },
- },
- failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
- successfulResponseHandler: responseFormat.handler,
- abortSignal,
- });
- }
  const openAITranscriptionJsonSchema = zod_1.z.object({
  text: zod_1.z.string(),
  });
@@ -150,11 +144,11 @@ const openAITranscriptionVerboseJsonSchema = zod_1.z.object({
  exports.OpenAITranscriptionResponseFormat = {
  json: {
  type: "json",
- handler: (0, postToApi_js_1.createJsonResponseHandler)(openAITranscriptionJsonSchema),
+ handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITranscriptionJsonSchema)),
  },
  verboseJson: {
  type: "verbose_json",
- handler: (0, postToApi_js_1.createJsonResponseHandler)(openAITranscriptionVerboseJsonSchema),
+ handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITranscriptionVerboseJsonSchema)),
  },
  text: {
  type: "text",
@@ -36,6 +36,10 @@ export interface OpenAITranscriptionModelSettings extends TranscriptionModelSett
  * increase the temperature until certain thresholds are hit.
  */
  temperature?: number;
+ /**
+ * An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.
+ */
+ prompt?: string;
  }
  export type OpenAITranscriptionInput = {
  type: "flac" | "m4a" | "mp3" | "mp4" | "mpeg" | "mpga" | "ogg" | "wav" | "webm";
@@ -64,16 +68,16 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
  doTranscribe(data: OpenAITranscriptionInput, options?: FunctionOptions): Promise<{
  response: {
  text: string;
+ duration: number;
  task: "transcribe";
  language: string;
- duration: number;
  segments: {
  text: string;
  id: number;
  temperature: number;
  tokens: number[];
- seek: number;
  start: number;
+ seek: number;
  end: number;
  avg_logprob: number;
  compression_ratio: number;
@@ -118,8 +122,8 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
  id: number;
  temperature: number;
  tokens: number[];
- seek: number;
  start: number;
+ seek: number;
  end: number;
  avg_logprob: number;
  compression_ratio: number;
@@ -130,8 +134,8 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
  id: number;
  temperature: number;
  tokens: number[];
- seek: number;
  start: number;
+ seek: number;
  end: number;
  avg_logprob: number;
  compression_ratio: number;
@@ -141,16 +145,16 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
  text: z.ZodString;
  }, "strip", z.ZodTypeAny, {
  text: string;
+ duration: number;
  task: "transcribe";
  language: string;
- duration: number;
  segments: {
  text: string;
  id: number;
  temperature: number;
  tokens: number[];
- seek: number;
  start: number;
+ seek: number;
  end: number;
  avg_logprob: number;
  compression_ratio: number;
@@ -159,16 +163,16 @@ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
  }[];
  }, {
  text: string;
+ duration: number;
  task: "transcribe";
  language: string;
- duration: number;
  segments: {
  text: string;
  id: number;
  temperature: number;
  tokens: number[];
- seek: number;
  start: number;
+ seek: number;
  end: number;
  avg_logprob: number;
  compression_ratio: number;
@@ -192,16 +196,16 @@ export declare const OpenAITranscriptionResponseFormat: {
  type: "verbose_json";
  handler: ResponseHandler<{
  text: string;
+ duration: number;
  task: "transcribe";
  language: string;
- duration: number;
  segments: {
  text: string;
  id: number;
  temperature: number;
  tokens: number[];
- seek: number;
  start: number;
+ seek: number;
  end: number;
  avg_logprob: number;
  compression_ratio: number;
@@ -1,6 +1,7 @@
  import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, createTextResponseHandler, postToApi, } from "../../core/api/postToApi.js";
+ import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
  import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
@@ -61,19 +62,46 @@ export class OpenAITranscriptionModel extends AbstractModel {
  };
  }
  async callAPI(data, options) {
+ const api = this.settings.api ?? new OpenAIApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return callWithRetryAndThrottle({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOpenAITranscriptionAPI({
- ...this.settings,
- // other settings:
- abortSignal: options?.run?.abortSignal,
- file: {
- name: `audio.${data.type}`,
- data: data.data,
- },
- responseFormat: options?.responseFormat,
- }),
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => {
+ const fileName = `audio.${data.type}`;
+ const formData = new FormData();
+ formData.append("file", new Blob([data.data]), fileName);
+ formData.append("model", this.settings.model);
+ if (this.settings.prompt != null) {
+ formData.append("prompt", this.settings.prompt);
+ }
+ if (options.responseFormat != null) {
+ formData.append("response_format", options.responseFormat.type);
+ }
+ if (this.settings.temperature != null) {
+ formData.append("temperature", this.settings.temperature.toString());
+ }
+ if (this.settings.language != null) {
+ formData.append("language", this.settings.language);
+ }
+ return postToApi({
+ url: api.assembleUrl("/audio/transcriptions"),
+ headers: api.headers,
+ body: {
+ content: formData,
+ values: {
+ model: this.settings.model,
+ prompt: this.settings.prompt,
+ response_format: options.responseFormat,
+ temperature: this.settings.temperature,
+ language: this.settings.language,
+ },
+ },
+ failedResponseHandler: failedOpenAICallResponseHandler,
+ successfulResponseHandler: options.responseFormat.handler,
+ abortSignal,
+ });
+ },
  });
  }
  get settingsForEvent() {
@@ -86,40 +114,6 @@ export class OpenAITranscriptionModel extends AbstractModel {
  return new OpenAITranscriptionModel(Object.assign({}, this.settings, additionalSettings));
  }
  }
- async function callOpenAITranscriptionAPI({ api = new OpenAIApiConfiguration(), abortSignal, model, file, prompt, responseFormat, temperature, language, }) {
- const formData = new FormData();
- formData.append("file", new Blob([file.data]), file.name);
- formData.append("model", model);
- if (prompt) {
- formData.append("prompt", prompt);
- }
- if (responseFormat) {
- formData.append("response_format", responseFormat.type);
- }
- if (temperature) {
- formData.append("temperature", temperature.toString());
- }
- if (language) {
- formData.append("language", language);
- }
- return postToApi({
- url: api.assembleUrl("/audio/transcriptions"),
- headers: api.headers,
- body: {
- content: formData,
- values: {
- model,
- prompt,
- response_format: responseFormat,
- temperature,
- language,
- },
- },
- failedResponseHandler: failedOpenAICallResponseHandler,
- successfulResponseHandler: responseFormat.handler,
- abortSignal,
- });
- }
  const openAITranscriptionJsonSchema = z.object({
  text: z.string(),
  });
@@ -145,11 +139,11 @@ const openAITranscriptionVerboseJsonSchema = z.object({
  export const OpenAITranscriptionResponseFormat = {
  json: {
  type: "json",
- handler: createJsonResponseHandler(openAITranscriptionJsonSchema),
+ handler: createJsonResponseHandler(zodSchema(openAITranscriptionJsonSchema)),
  },
  verboseJson: {
  type: "verbose_json",
- handler: createJsonResponseHandler(openAITranscriptionVerboseJsonSchema),
+ handler: createJsonResponseHandler(zodSchema(openAITranscriptionVerboseJsonSchema)),
  },
  text: {
  type: "text",
@@ -6,8 +6,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.TikTokenTokenizer = void 0;
  const lite_1 = require("js-tiktoken/lite");
  const cl100k_base_1 = __importDefault(require("js-tiktoken/ranks/cl100k_base"));
- const p50k_base_1 = __importDefault(require("js-tiktoken/ranks/p50k_base"));
- const r50k_base_1 = __importDefault(require("js-tiktoken/ranks/r50k_base"));
  const never_js_1 = require("../../util/never.cjs");
  /**
  * TikToken tokenizer for OpenAI language models.
@@ -56,22 +54,6 @@ exports.TikTokenTokenizer = TikTokenTokenizer;
  // when new models are released
  function getTiktokenBPE(model) {
  switch (model) {
- case "code-davinci-002":
- case "text-davinci-002":
- case "text-davinci-003": {
- return p50k_base_1.default;
- }
- case "ada":
- case "babbage":
- case "curie":
- case "davinci":
- case "text-ada-001":
- case "text-babbage-001":
- case "text-curie-001": {
- return r50k_base_1.default;
- }
- case "babbage-002":
- case "davinci-002":
  case "gpt-3.5-turbo":
  case "gpt-3.5-turbo-0301":
  case "gpt-3.5-turbo-0613":