modelfusion 0.0.44

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (405)
  1. package/LICENSE +21 -0
  2. package/README.md +429 -0
  3. package/composed-function/index.cjs +22 -0
  4. package/composed-function/index.d.ts +6 -0
  5. package/composed-function/index.js +6 -0
  6. package/composed-function/summarize/SummarizationFunction.cjs +2 -0
  7. package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
  8. package/composed-function/summarize/SummarizationFunction.js +1 -0
  9. package/composed-function/summarize/summarizeRecursively.cjs +19 -0
  10. package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
  11. package/composed-function/summarize/summarizeRecursively.js +15 -0
  12. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
  13. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
  14. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
  15. package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
  16. package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
  17. package/composed-function/use-tool/NoSuchToolError.js +13 -0
  18. package/composed-function/use-tool/Tool.cjs +43 -0
  19. package/composed-function/use-tool/Tool.d.ts +15 -0
  20. package/composed-function/use-tool/Tool.js +39 -0
  21. package/composed-function/use-tool/useTool.cjs +59 -0
  22. package/composed-function/use-tool/useTool.d.ts +36 -0
  23. package/composed-function/use-tool/useTool.js +54 -0
  24. package/cost/Cost.cjs +38 -0
  25. package/cost/Cost.d.ts +16 -0
  26. package/cost/Cost.js +34 -0
  27. package/cost/CostCalculator.cjs +2 -0
  28. package/cost/CostCalculator.d.ts +8 -0
  29. package/cost/CostCalculator.js +1 -0
  30. package/cost/calculateCost.cjs +28 -0
  31. package/cost/calculateCost.d.ts +7 -0
  32. package/cost/calculateCost.js +24 -0
  33. package/cost/index.cjs +19 -0
  34. package/cost/index.d.ts +3 -0
  35. package/cost/index.js +3 -0
  36. package/index.cjs +25 -0
  37. package/index.d.ts +9 -0
  38. package/index.js +9 -0
  39. package/model-function/AbstractModel.cjs +22 -0
  40. package/model-function/AbstractModel.d.ts +12 -0
  41. package/model-function/AbstractModel.js +18 -0
  42. package/model-function/FunctionOptions.cjs +2 -0
  43. package/model-function/FunctionOptions.d.ts +6 -0
  44. package/model-function/FunctionOptions.js +1 -0
  45. package/model-function/Model.cjs +2 -0
  46. package/model-function/Model.d.ts +23 -0
  47. package/model-function/Model.js +1 -0
  48. package/model-function/ModelCallEvent.cjs +2 -0
  49. package/model-function/ModelCallEvent.d.ts +18 -0
  50. package/model-function/ModelCallEvent.js +1 -0
  51. package/model-function/ModelCallEventSource.cjs +42 -0
  52. package/model-function/ModelCallEventSource.d.ts +13 -0
  53. package/model-function/ModelCallEventSource.js +38 -0
  54. package/model-function/ModelCallObserver.cjs +2 -0
  55. package/model-function/ModelCallObserver.d.ts +5 -0
  56. package/model-function/ModelCallObserver.js +1 -0
  57. package/model-function/ModelInformation.cjs +2 -0
  58. package/model-function/ModelInformation.d.ts +4 -0
  59. package/model-function/ModelInformation.js +1 -0
  60. package/model-function/SuccessfulModelCall.cjs +22 -0
  61. package/model-function/SuccessfulModelCall.d.ts +9 -0
  62. package/model-function/SuccessfulModelCall.js +18 -0
  63. package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
  64. package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
  65. package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
  66. package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
  67. package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
  68. package/model-function/embed-text/TextEmbeddingModel.js +1 -0
  69. package/model-function/embed-text/embedText.cjs +90 -0
  70. package/model-function/embed-text/embedText.d.ts +33 -0
  71. package/model-function/embed-text/embedText.js +85 -0
  72. package/model-function/executeCall.cjs +60 -0
  73. package/model-function/executeCall.d.ts +27 -0
  74. package/model-function/executeCall.js +56 -0
  75. package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
  76. package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
  77. package/model-function/generate-image/ImageGenerationEvent.js +1 -0
  78. package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
  79. package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
  80. package/model-function/generate-image/ImageGenerationModel.js +1 -0
  81. package/model-function/generate-image/generateImage.cjs +63 -0
  82. package/model-function/generate-image/generateImage.d.ts +23 -0
  83. package/model-function/generate-image/generateImage.js +59 -0
  84. package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
  85. package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
  86. package/model-function/generate-json/GenerateJsonModel.js +1 -0
  87. package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
  88. package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
  89. package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
  90. package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
  91. package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
  92. package/model-function/generate-json/JsonGenerationEvent.js +1 -0
  93. package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
  94. package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
  95. package/model-function/generate-json/NoSuchSchemaError.js +13 -0
  96. package/model-function/generate-json/SchemaDefinition.cjs +2 -0
  97. package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
  98. package/model-function/generate-json/SchemaDefinition.js +1 -0
  99. package/model-function/generate-json/SchemaValidationError.cjs +36 -0
  100. package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
  101. package/model-function/generate-json/SchemaValidationError.js +32 -0
  102. package/model-function/generate-json/generateJson.cjs +61 -0
  103. package/model-function/generate-json/generateJson.d.ts +9 -0
  104. package/model-function/generate-json/generateJson.js +57 -0
  105. package/model-function/generate-json/generateJsonOrText.cjs +74 -0
  106. package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
  107. package/model-function/generate-json/generateJsonOrText.js +70 -0
  108. package/model-function/generate-text/AsyncQueue.cjs +66 -0
  109. package/model-function/generate-text/AsyncQueue.d.ts +17 -0
  110. package/model-function/generate-text/AsyncQueue.js +62 -0
  111. package/model-function/generate-text/DeltaEvent.cjs +2 -0
  112. package/model-function/generate-text/DeltaEvent.d.ts +7 -0
  113. package/model-function/generate-text/DeltaEvent.js +1 -0
  114. package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
  115. package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
  116. package/model-function/generate-text/TextDeltaEventSource.js +46 -0
  117. package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
  118. package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
  119. package/model-function/generate-text/TextGenerationEvent.js +1 -0
  120. package/model-function/generate-text/TextGenerationModel.cjs +2 -0
  121. package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
  122. package/model-function/generate-text/TextGenerationModel.js +1 -0
  123. package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
  124. package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
  125. package/model-function/generate-text/TextStreamingEvent.js +1 -0
  126. package/model-function/generate-text/extractTextDeltas.cjs +23 -0
  127. package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
  128. package/model-function/generate-text/extractTextDeltas.js +19 -0
  129. package/model-function/generate-text/generateText.cjs +67 -0
  130. package/model-function/generate-text/generateText.d.ts +20 -0
  131. package/model-function/generate-text/generateText.js +63 -0
  132. package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
  133. package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
  134. package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
  135. package/model-function/generate-text/streamText.cjs +115 -0
  136. package/model-function/generate-text/streamText.d.ts +11 -0
  137. package/model-function/generate-text/streamText.js +111 -0
  138. package/model-function/index.cjs +47 -0
  139. package/model-function/index.d.ts +31 -0
  140. package/model-function/index.js +31 -0
  141. package/model-function/tokenize-text/Tokenizer.cjs +2 -0
  142. package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
  143. package/model-function/tokenize-text/Tokenizer.js +1 -0
  144. package/model-function/tokenize-text/countTokens.cjs +10 -0
  145. package/model-function/tokenize-text/countTokens.d.ts +5 -0
  146. package/model-function/tokenize-text/countTokens.js +6 -0
  147. package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
  148. package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
  149. package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
  150. package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
  151. package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
  152. package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
  153. package/model-function/transcribe-audio/transcribe.cjs +62 -0
  154. package/model-function/transcribe-audio/transcribe.d.ts +22 -0
  155. package/model-function/transcribe-audio/transcribe.js +58 -0
  156. package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
  157. package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
  158. package/model-provider/automatic1111/Automatic1111Error.js +31 -0
  159. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
  160. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
  161. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
  162. package/model-provider/automatic1111/index.cjs +20 -0
  163. package/model-provider/automatic1111/index.d.ts +2 -0
  164. package/model-provider/automatic1111/index.js +2 -0
  165. package/model-provider/cohere/CohereError.cjs +36 -0
  166. package/model-provider/cohere/CohereError.d.ts +22 -0
  167. package/model-provider/cohere/CohereError.js +28 -0
  168. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
  169. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
  170. package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
  171. package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
  172. package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
  173. package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
  174. package/model-provider/cohere/CohereTokenizer.cjs +136 -0
  175. package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
  176. package/model-provider/cohere/CohereTokenizer.js +129 -0
  177. package/model-provider/cohere/index.cjs +22 -0
  178. package/model-provider/cohere/index.d.ts +4 -0
  179. package/model-provider/cohere/index.js +4 -0
  180. package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
  181. package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
  182. package/model-provider/huggingface/HuggingFaceError.js +44 -0
  183. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
  184. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
  185. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
  186. package/model-provider/huggingface/index.cjs +20 -0
  187. package/model-provider/huggingface/index.d.ts +2 -0
  188. package/model-provider/huggingface/index.js +2 -0
  189. package/model-provider/index.cjs +22 -0
  190. package/model-provider/index.d.ts +6 -0
  191. package/model-provider/index.js +6 -0
  192. package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
  193. package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
  194. package/model-provider/llamacpp/LlamaCppError.js +44 -0
  195. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
  196. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
  197. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
  198. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
  199. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
  200. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
  201. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
  202. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
  203. package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
  204. package/model-provider/llamacpp/index.cjs +22 -0
  205. package/model-provider/llamacpp/index.d.ts +4 -0
  206. package/model-provider/llamacpp/index.js +4 -0
  207. package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
  208. package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
  209. package/model-provider/openai/OpenAICostCalculator.js +67 -0
  210. package/model-provider/openai/OpenAIError.cjs +50 -0
  211. package/model-provider/openai/OpenAIError.d.ts +47 -0
  212. package/model-provider/openai/OpenAIError.js +42 -0
  213. package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
  214. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
  215. package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
  216. package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
  217. package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
  218. package/model-provider/openai/OpenAIModelSettings.js +1 -0
  219. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
  220. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
  221. package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
  222. package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
  223. package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
  224. package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
  225. package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
  226. package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
  227. package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
  228. package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
  229. package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
  230. package/model-provider/openai/TikTokenTokenizer.js +82 -0
  231. package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
  232. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
  233. package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
  234. package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
  235. package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
  236. package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
  237. package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
  238. package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
  239. package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
  240. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
  241. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
  242. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
  243. package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
  244. package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
  245. package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
  246. package/model-provider/openai/index.cjs +31 -0
  247. package/model-provider/openai/index.d.ts +13 -0
  248. package/model-provider/openai/index.js +12 -0
  249. package/model-provider/stability/StabilityError.cjs +36 -0
  250. package/model-provider/stability/StabilityError.d.ts +22 -0
  251. package/model-provider/stability/StabilityError.js +28 -0
  252. package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
  253. package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
  254. package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
  255. package/model-provider/stability/index.cjs +20 -0
  256. package/model-provider/stability/index.d.ts +2 -0
  257. package/model-provider/stability/index.js +2 -0
  258. package/package.json +87 -0
  259. package/prompt/InstructionPrompt.cjs +2 -0
  260. package/prompt/InstructionPrompt.d.ts +7 -0
  261. package/prompt/InstructionPrompt.js +1 -0
  262. package/prompt/Llama2PromptMapping.cjs +56 -0
  263. package/prompt/Llama2PromptMapping.d.ts +10 -0
  264. package/prompt/Llama2PromptMapping.js +51 -0
  265. package/prompt/OpenAIChatPromptMapping.cjs +62 -0
  266. package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
  267. package/prompt/OpenAIChatPromptMapping.js +57 -0
  268. package/prompt/PromptMapping.cjs +2 -0
  269. package/prompt/PromptMapping.d.ts +7 -0
  270. package/prompt/PromptMapping.js +1 -0
  271. package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
  272. package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
  273. package/prompt/PromptMappingTextGenerationModel.js +84 -0
  274. package/prompt/TextPromptMapping.cjs +50 -0
  275. package/prompt/TextPromptMapping.d.ts +14 -0
  276. package/prompt/TextPromptMapping.js +45 -0
  277. package/prompt/chat/ChatPrompt.cjs +2 -0
  278. package/prompt/chat/ChatPrompt.d.ts +33 -0
  279. package/prompt/chat/ChatPrompt.js +1 -0
  280. package/prompt/chat/trimChatPrompt.cjs +50 -0
  281. package/prompt/chat/trimChatPrompt.d.ts +19 -0
  282. package/prompt/chat/trimChatPrompt.js +46 -0
  283. package/prompt/chat/validateChatPrompt.cjs +36 -0
  284. package/prompt/chat/validateChatPrompt.d.ts +8 -0
  285. package/prompt/chat/validateChatPrompt.js +31 -0
  286. package/prompt/index.cjs +25 -0
  287. package/prompt/index.d.ts +9 -0
  288. package/prompt/index.js +9 -0
  289. package/run/ConsoleLogger.cjs +12 -0
  290. package/run/ConsoleLogger.d.ts +6 -0
  291. package/run/ConsoleLogger.js +8 -0
  292. package/run/DefaultRun.cjs +78 -0
  293. package/run/DefaultRun.d.ts +24 -0
  294. package/run/DefaultRun.js +74 -0
  295. package/run/IdMetadata.cjs +2 -0
  296. package/run/IdMetadata.d.ts +7 -0
  297. package/run/IdMetadata.js +1 -0
  298. package/run/Run.cjs +2 -0
  299. package/run/Run.d.ts +27 -0
  300. package/run/Run.js +1 -0
  301. package/run/RunFunction.cjs +2 -0
  302. package/run/RunFunction.d.ts +13 -0
  303. package/run/RunFunction.js +1 -0
  304. package/run/Vector.cjs +2 -0
  305. package/run/Vector.d.ts +5 -0
  306. package/run/Vector.js +1 -0
  307. package/run/index.cjs +22 -0
  308. package/run/index.d.ts +6 -0
  309. package/run/index.js +6 -0
  310. package/text-chunk/TextChunk.cjs +2 -0
  311. package/text-chunk/TextChunk.d.ts +3 -0
  312. package/text-chunk/TextChunk.js +1 -0
  313. package/text-chunk/index.cjs +22 -0
  314. package/text-chunk/index.d.ts +6 -0
  315. package/text-chunk/index.js +6 -0
  316. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
  317. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
  318. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
  319. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
  320. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
  321. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
  322. package/text-chunk/split/SplitFunction.cjs +2 -0
  323. package/text-chunk/split/SplitFunction.d.ts +4 -0
  324. package/text-chunk/split/SplitFunction.js +1 -0
  325. package/text-chunk/split/splitOnSeparator.cjs +12 -0
  326. package/text-chunk/split/splitOnSeparator.d.ts +8 -0
  327. package/text-chunk/split/splitOnSeparator.js +7 -0
  328. package/text-chunk/split/splitRecursively.cjs +41 -0
  329. package/text-chunk/split/splitRecursively.d.ts +22 -0
  330. package/text-chunk/split/splitRecursively.js +33 -0
  331. package/util/DurationMeasurement.cjs +42 -0
  332. package/util/DurationMeasurement.d.ts +5 -0
  333. package/util/DurationMeasurement.js +38 -0
  334. package/util/ErrorHandler.cjs +2 -0
  335. package/util/ErrorHandler.d.ts +1 -0
  336. package/util/ErrorHandler.js +1 -0
  337. package/util/SafeResult.cjs +2 -0
  338. package/util/SafeResult.d.ts +8 -0
  339. package/util/SafeResult.js +1 -0
  340. package/util/api/AbortError.cjs +9 -0
  341. package/util/api/AbortError.d.ts +3 -0
  342. package/util/api/AbortError.js +5 -0
  343. package/util/api/ApiCallError.cjs +45 -0
  344. package/util/api/ApiCallError.d.ts +15 -0
  345. package/util/api/ApiCallError.js +41 -0
  346. package/util/api/RetryError.cjs +24 -0
  347. package/util/api/RetryError.d.ts +10 -0
  348. package/util/api/RetryError.js +20 -0
  349. package/util/api/RetryFunction.cjs +2 -0
  350. package/util/api/RetryFunction.d.ts +1 -0
  351. package/util/api/RetryFunction.js +1 -0
  352. package/util/api/ThrottleFunction.cjs +2 -0
  353. package/util/api/ThrottleFunction.d.ts +1 -0
  354. package/util/api/ThrottleFunction.js +1 -0
  355. package/util/api/callWithRetryAndThrottle.cjs +7 -0
  356. package/util/api/callWithRetryAndThrottle.d.ts +7 -0
  357. package/util/api/callWithRetryAndThrottle.js +3 -0
  358. package/util/api/postToApi.cjs +103 -0
  359. package/util/api/postToApi.d.ts +29 -0
  360. package/util/api/postToApi.js +96 -0
  361. package/util/api/retryNever.cjs +8 -0
  362. package/util/api/retryNever.d.ts +4 -0
  363. package/util/api/retryNever.js +4 -0
  364. package/util/api/retryWithExponentialBackoff.cjs +48 -0
  365. package/util/api/retryWithExponentialBackoff.d.ts +10 -0
  366. package/util/api/retryWithExponentialBackoff.js +44 -0
  367. package/util/api/throttleMaxConcurrency.cjs +65 -0
  368. package/util/api/throttleMaxConcurrency.d.ts +7 -0
  369. package/util/api/throttleMaxConcurrency.js +61 -0
  370. package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
  371. package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
  372. package/util/api/throttleUnlimitedConcurrency.js +4 -0
  373. package/util/cosineSimilarity.cjs +26 -0
  374. package/util/cosineSimilarity.d.ts +11 -0
  375. package/util/cosineSimilarity.js +22 -0
  376. package/util/index.cjs +26 -0
  377. package/util/index.d.ts +10 -0
  378. package/util/index.js +10 -0
  379. package/util/never.cjs +6 -0
  380. package/util/never.d.ts +1 -0
  381. package/util/never.js +2 -0
  382. package/util/runSafe.cjs +15 -0
  383. package/util/runSafe.d.ts +2 -0
  384. package/util/runSafe.js +11 -0
  385. package/vector-index/VectorIndex.cjs +2 -0
  386. package/vector-index/VectorIndex.d.ts +18 -0
  387. package/vector-index/VectorIndex.js +1 -0
  388. package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
  389. package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
  390. package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
  391. package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
  392. package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
  393. package/vector-index/VectorIndexTextChunkStore.js +73 -0
  394. package/vector-index/index.cjs +22 -0
  395. package/vector-index/index.d.ts +6 -0
  396. package/vector-index/index.js +6 -0
  397. package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
  398. package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
  399. package/vector-index/memory/MemoryVectorIndex.js +56 -0
  400. package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
  401. package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
  402. package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
  403. package/vector-index/upsertTextChunks.cjs +15 -0
  404. package/vector-index/upsertTextChunks.d.ts +11 -0
  405. package/vector-index/upsertTextChunks.js +11 -0
package/model-provider/openai/OpenAITextGenerationModel.js
@@ -0,0 +1,317 @@
+ import SecureJSON from "secure-json-parse";
+ import z from "zod";
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { AsyncQueue } from "../../model-function/generate-text/AsyncQueue.js";
+ import { parseEventSourceReadableStream } from "../../model-function/generate-text/parseEventSourceReadableStream.js";
+ import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
+ import { PromptMappingTextGenerationModel } from "../../prompt/PromptMappingTextGenerationModel.js";
+ import { callWithRetryAndThrottle } from "../../util/api/callWithRetryAndThrottle.js";
+ import { createJsonResponseHandler, postJsonToApi, } from "../../util/api/postToApi.js";
+ import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
+ import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
+ /**
+ * @see https://platform.openai.com/docs/models/
+ * @see https://openai.com/pricing
+ */
+ export const OPENAI_TEXT_GENERATION_MODELS = {
+ "text-davinci-003": {
+ contextWindowSize: 4096,
+ tokenCostInMillicents: 2,
+ },
+ "text-davinci-002": {
+ contextWindowSize: 4096,
+ tokenCostInMillicents: 2,
+ },
+ "code-davinci-002": {
+ contextWindowSize: 8000,
+ tokenCostInMillicents: 2,
+ },
+ davinci: {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 2,
+ },
+ "text-curie-001": {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 0.2,
+ },
+ curie: {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 0.2,
+ },
+ "text-babbage-001": {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 0.05,
+ },
+ babbage: {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 0.05,
+ },
+ "text-ada-001": {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 0.04,
+ },
+ ada: {
+ contextWindowSize: 2048,
+ tokenCostInMillicents: 0.04,
+ },
+ };
+ export const isOpenAITextGenerationModel = (model) => model in OPENAI_TEXT_GENERATION_MODELS;
+ export const calculateOpenAITextGenerationCostInMillicents = ({ model, response, }) => response.usage.total_tokens *
+ OPENAI_TEXT_GENERATION_MODELS[model].tokenCostInMillicents;
+ /**
+ * Create a text generation model that calls the OpenAI text completion API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/completions/create
+ *
+ * @example
+ * const model = new OpenAITextGenerationModel({
+ * model: "text-davinci-003",
+ * temperature: 0.7,
+ * maxTokens: 500,
+ * retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const { text } = await generateText(
+ * model,
+ * "Write a short story about a robot learning to love:\n\n"
+ * );
+ */
+ export class OpenAITextGenerationModel extends AbstractModel {
+ constructor(settings) {
+ super({ settings });
+ Object.defineProperty(this, "provider", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "openai"
+ });
+ Object.defineProperty(this, "contextWindowSize", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "tokenizer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.tokenizer = new TikTokenTokenizer({ model: settings.model });
+ this.contextWindowSize =
+ OPENAI_TEXT_GENERATION_MODELS[settings.model].contextWindowSize;
+ }
+ get modelName() {
+ return this.settings.model;
+ }
+ get apiKey() {
+ const apiKey = this.settings.apiKey ?? process.env.OPENAI_API_KEY;
+ if (apiKey == null) {
+ throw new Error(`OpenAI API key is missing. Pass it as an argument to the constructor or set it as an environment variable named OPENAI_API_KEY.`);
+ }
+ return apiKey;
+ }
+ async countPromptTokens(input) {
+ return countTokens(this.tokenizer, input);
+ }
+ async callAPI(prompt, options) {
+ const { run, settings, responseFormat } = options;
+ const callSettings = Object.assign({
+ apiKey: this.apiKey,
+ user: this.settings.isUserIdForwardingEnabled ? run?.userId : undefined,
+ }, this.settings, settings, {
+ abortSignal: run?.abortSignal,
+ prompt,
+ responseFormat,
+ });
+ return callWithRetryAndThrottle({
+ retry: callSettings.retry,
+ throttle: callSettings.throttle,
+ call: async () => callOpenAITextGenerationAPI(callSettings),
+ });
+ }
+ generateTextResponse(prompt, options) {
+ return this.callAPI(prompt, {
+ ...options,
+ responseFormat: OpenAITextResponseFormat.json,
+ });
+ }
+ extractText(response) {
+ return response.choices[0].text;
+ }
+ generateDeltaStreamResponse(prompt, options) {
+ return this.callAPI(prompt, {
+ ...options,
+ responseFormat: OpenAITextResponseFormat.deltaIterable,
+ });
+ }
+ extractTextDelta(fullDelta) {
+ return fullDelta[0].delta;
+ }
+ mapPrompt(promptMapping) {
+ return new PromptMappingTextGenerationModel({
+ model: this.withStopTokens(promptMapping.stopTokens),
+ promptMapping,
+ });
+ }
+ withSettings(additionalSettings) {
+ return new OpenAITextGenerationModel(Object.assign({}, this.settings, additionalSettings));
+ }
+ get maxCompletionTokens() {
+ return this.settings.maxTokens;
+ }
+ withMaxCompletionTokens(maxCompletionTokens) {
+ return this.withSettings({ maxTokens: maxCompletionTokens });
+ }
+ withStopTokens(stopTokens) {
+ return this.withSettings({ stop: stopTokens });
+ }
+ }
+ const openAITextGenerationResponseSchema = z.object({
+ id: z.string(),
+ object: z.literal("text_completion"),
+ created: z.number(),
+ model: z.string(),
+ choices: z.array(z.object({
+ text: z.string(),
+ index: z.number(),
+ logprobs: z.nullable(z.any()),
+ finish_reason: z.string(),
+ })),
+ usage: z.object({
+ prompt_tokens: z.number(),
+ completion_tokens: z.number(),
+ total_tokens: z.number(),
+ }),
+ });
+ /**
+ * Call the OpenAI Text Completion API to generate a text completion for the given prompt.
+ *
+ * @see https://platform.openai.com/docs/api-reference/completions/create
+ *
+ * @example
+ * const response = await callOpenAITextGenerationAPI({
+ * apiKey: OPENAI_API_KEY,
+ * model: "text-davinci-003",
+ * prompt: "Write a short story about a robot learning to love:\n\n",
+ * temperature: 0.7,
+ * maxTokens: 500,
+ * });
+ *
+ * console.log(response.choices[0].text);
+ */
+ async function callOpenAITextGenerationAPI({ baseUrl = "https://api.openai.com/v1", abortSignal, responseFormat, apiKey, model, prompt, suffix, maxTokens, temperature, topP, n, logprobs, echo, stop, presencePenalty, frequencyPenalty, bestOf, user, }) {
+ return postJsonToApi({
+ url: `${baseUrl}/completions`,
+ apiKey,
+ body: {
+ stream: responseFormat.stream,
+ model,
+ prompt,
+ suffix,
+ max_tokens: maxTokens,
+ temperature,
+ top_p: topP,
+ n,
+ logprobs,
+ echo,
+ stop,
+ presence_penalty: presencePenalty,
+ frequency_penalty: frequencyPenalty,
+ best_of: bestOf,
+ user,
+ },
+ failedResponseHandler: failedOpenAICallResponseHandler,
+ successfulResponseHandler: responseFormat.handler,
+ abortSignal,
+ });
+ }
+ export const OpenAITextResponseFormat = {
+ /**
+ * Returns the response as a JSON object.
+ */
+ json: {
+ stream: false,
+ handler: createJsonResponseHandler(openAITextGenerationResponseSchema),
+ },
+ /**
+ * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
+ * of the response stream.
+ */
+ deltaIterable: {
+ stream: true,
+ handler: async ({ response }) => createOpenAITextFullDeltaIterableQueue(response.body),
+ },
+ };
+ const textResponseStreamEventSchema = z.object({
+ choices: z.array(z.object({
+ text: z.string(),
+ finish_reason: z.enum(["stop", "length"]).nullable(),
+ index: z.number(),
+ })),
+ created: z.number(),
+ id: z.string(),
+ model: z.string(),
+ object: z.string(),
+ });
+ async function createOpenAITextFullDeltaIterableQueue(stream) {
+ const queue = new AsyncQueue();
+ const streamDelta = [];
+ // process the stream asynchronously (no 'await' on purpose):
+ parseEventSourceReadableStream({
+ stream,
+ callback: (event) => {
+ if (event.type !== "event") {
+ return;
+ }
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
+ try {
+ const json = SecureJSON.parse(data);
+ const parseResult = textResponseStreamEventSchema.safeParse(json);
+ if (!parseResult.success) {
+ queue.push({
+ type: "error",
+ error: parseResult.error,
+ });
+ queue.close();
+ return;
+ }
+ const event = parseResult.data;
+ for (let i = 0; i < event.choices.length; i++) {
+ const eventChoice = event.choices[i];
+ const delta = eventChoice.text;
+ if (streamDelta[i] == null) {
+ streamDelta[i] = {
+ content: "",
+ isComplete: false,
+ delta: "",
+ };
+ }
+ const choice = streamDelta[i];
+ choice.delta = delta;
+ if (eventChoice.finish_reason != null) {
+ choice.isComplete = true;
+ }
+ choice.content += delta;
+ }
+ // Since we're mutating the choices array in an async scenario,
+ // we need to make a deep copy:
+ const streamDeltaDeepCopy = JSON.parse(JSON.stringify(streamDelta));
+ queue.push({
+ type: "delta",
+ fullDelta: streamDeltaDeepCopy,
+ });
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ },
+ });
+ return queue;
+ }
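The file above routes every completion call through callWithRetryAndThrottle and exposes two response formats: json for one-shot calls and deltaIterable for streaming. A minimal usage sketch based on the JSDoc example in this file; the "modelfusion" root import and the re-exported helper names are assumptions and are not part of this diff:

// Sketch only: assumes the package root re-exports these symbols (see package/index.js in the listing above).
import { OpenAITextGenerationModel, generateText } from "modelfusion";

const model = new OpenAITextGenerationModel({
  model: "text-davinci-003",
  temperature: 0.7,
  maxTokens: 500,
});

const prompt = "Write a short story about a robot learning to love:\n\n";

// countPromptTokens uses the TikTokenTokenizer created in the constructor,
// so the prompt can be checked against the model's context window.
const promptTokens = await model.countPromptTokens(prompt);
console.log(`prompt: ${promptTokens} of ${model.contextWindowSize} tokens`);

// generateText resolves with the text of the first choice (extractText above).
const { text } = await generateText(model, prompt);
console.log(text);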
package/model-provider/openai/OpenAITranscriptionModel.cjs
@@ -0,0 +1,195 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OpenAITranscriptionResponseFormat = exports.OpenAITranscriptionModel = exports.calculateOpenAITranscriptionCostInMillicents = exports.OPENAI_TRANSCRIPTION_MODELS = void 0;
+ const zod_1 = __importDefault(require("zod"));
+ const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
+ const callWithRetryAndThrottle_js_1 = require("../../util/api/callWithRetryAndThrottle.cjs");
+ const postToApi_js_1 = require("../../util/api/postToApi.cjs");
+ const OpenAIError_js_1 = require("./OpenAIError.cjs");
+ /**
+ * @see https://openai.com/pricing
+ */
+ exports.OPENAI_TRANSCRIPTION_MODELS = {
+ "whisper-1": {
+ costInMillicentsPerSecond: 10, // = 600 / 60,
+ },
+ };
+ const calculateOpenAITranscriptionCostInMillicents = ({ model, response, }) => {
+ if (model !== "whisper-1") {
+ return null;
+ }
+ const durationInSeconds = response.duration;
+ return (Math.ceil(durationInSeconds) *
+ exports.OPENAI_TRANSCRIPTION_MODELS[model].costInMillicentsPerSecond);
+ };
+ exports.calculateOpenAITranscriptionCostInMillicents = calculateOpenAITranscriptionCostInMillicents;
+ /**
+ * Create a transcription model that calls the OpenAI transcription API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/audio/create
+ *
+ * @example
+ * const data = await fs.promises.readFile("data/test.mp3");
+ *
+ * const { transcription } = await transcribe(
+ * new OpenAITranscriptionModel({ model: "whisper-1" }),
+ * {
+ * type: "mp3",
+ * data,
+ * }
+ * );
+ */
+ class OpenAITranscriptionModel extends AbstractModel_js_1.AbstractModel {
+ constructor(settings) {
+ super({ settings });
+ Object.defineProperty(this, "provider", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "openai"
+ });
+ }
+ get modelName() {
+ return this.settings.model;
+ }
+ generateTranscriptionResponse(data, options) {
+ return this.callAPI(data, {
+ responseFormat: exports.OpenAITranscriptionResponseFormat.verboseJson,
+ functionId: options?.functionId,
+ settings: options?.settings,
+ run: options?.run,
+ });
+ }
+ extractTranscriptionText(response) {
+ return response.text;
+ }
+ get apiKey() {
+ const apiKey = this.settings.apiKey ?? process.env.OPENAI_API_KEY;
+ if (apiKey == null) {
+ throw new Error(`OpenAI API key is missing. Pass it as an argument to the constructor or set it as an environment variable named OPENAI_API_KEY.`);
+ }
+ return apiKey;
+ }
+ async callAPI(data, options) {
+ const run = options?.run;
+ const settings = options?.settings;
+ const responseFormat = options?.responseFormat;
+ const callSettings = Object.assign({
+ apiKey: this.apiKey,
+ }, this.settings, settings, {
+ abortSignal: run?.abortSignal,
+ file: {
+ name: `audio.${data.type}`,
+ data: data.data,
+ },
+ responseFormat,
+ });
+ return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
+ retry: this.settings.retry,
+ throttle: this.settings.throttle,
+ call: async () => callOpenAITranscriptionAPI(callSettings),
+ });
+ }
+ withSettings(additionalSettings) {
+ return new OpenAITranscriptionModel(Object.assign({}, this.settings, additionalSettings));
+ }
+ }
+ exports.OpenAITranscriptionModel = OpenAITranscriptionModel;
+ /**
+ * Call the OpenAI Transcription API to generate a transcription from an audio file.
+ *
+ * @see https://platform.openai.com/docs/api-reference/audio/create
+ *
+ * @example
+ * const transcriptionResponse = await callOpenAITranscriptionAPI({
+ * apiKey: openAiApiKey,
+ * model: "whisper-1",
+ * file: {
+ * name: "audio.mp3",
+ * data: fileData, // Buffer
+ * },
+ * responseFormat: callOpenAITranscriptionAPI.responseFormat.json,
+ * });
+ */
+ async function callOpenAITranscriptionAPI({ baseUrl = "https://api.openai.com/v1", abortSignal, apiKey, model, file, prompt, responseFormat, temperature, language, }) {
+ const formData = new FormData();
+ formData.append("file", new Blob([file.data]), file.name);
+ formData.append("model", model);
+ if (prompt) {
+ formData.append("prompt", prompt);
+ }
+ if (responseFormat) {
+ formData.append("response_format", responseFormat.type);
+ }
+ if (temperature) {
+ formData.append("temperature", temperature.toString());
+ }
+ if (language) {
+ formData.append("language", language);
+ }
+ return (0, postToApi_js_1.postToApi)({
+ url: `${baseUrl}/audio/transcriptions`,
+ apiKey,
+ contentType: null,
+ body: {
+ content: formData,
+ values: {
+ model,
+ prompt,
+ response_format: responseFormat,
+ temperature,
+ language,
+ },
+ },
+ failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+ successfulResponseHandler: responseFormat.handler,
+ abortSignal,
+ });
+ }
+ const openAITranscriptionJsonSchema = zod_1.default.object({
+ text: zod_1.default.string(),
+ });
+ const openAITranscriptionVerboseJsonSchema = zod_1.default.object({
+ task: zod_1.default.literal("transcribe"),
+ language: zod_1.default.string(),
+ duration: zod_1.default.number(),
+ segments: zod_1.default.array(zod_1.default.object({
+ id: zod_1.default.number(),
+ seek: zod_1.default.number(),
+ start: zod_1.default.number(),
+ end: zod_1.default.number(),
+ text: zod_1.default.string(),
+ tokens: zod_1.default.array(zod_1.default.number()),
+ temperature: zod_1.default.number(),
+ avg_logprob: zod_1.default.number(),
+ compression_ratio: zod_1.default.number(),
+ no_speech_prob: zod_1.default.number(),
+ transient: zod_1.default.boolean().optional(),
+ })),
+ text: zod_1.default.string(),
+ });
+ exports.OpenAITranscriptionResponseFormat = {
+ json: {
+ type: "json",
+ handler: (0, postToApi_js_1.createJsonResponseHandler)(openAITranscriptionJsonSchema),
+ },
+ verboseJson: {
+ type: "verbose_json",
+ handler: (0, postToApi_js_1.createJsonResponseHandler)(openAITranscriptionVerboseJsonSchema),
+ },
+ text: {
+ type: "text",
+ handler: (0, postToApi_js_1.createTextResponseHandler)(),
+ },
+ srt: {
+ type: "srt",
+ handler: (0, postToApi_js_1.createTextResponseHandler)(),
+ },
+ vtt: {
+ type: "vtt",
+ handler: (0, postToApi_js_1.createTextResponseHandler)(),
+ },
+ };
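In the CommonJS build above, generateTranscriptionResponse always requests the verbose_json format, whose duration field feeds calculateOpenAITranscriptionCostInMillicents. A small sketch of that cost path; the require path is an assumption and the response object is a hypothetical fragment containing only the field the calculation reads:

// Sketch only: assumes the package root re-exports the cost helper.
const { calculateOpenAITranscriptionCostInMillicents } = require("modelfusion");

// Hypothetical fragment; a real verbose_json response also carries task, language, text and segments.
const response = { duration: 12.5 };

// Math.ceil(12.5) = 13 seconds * 10 millicents/second = 130 millicents (0.13 cents).
const cost = calculateOpenAITranscriptionCostInMillicents({ model: "whisper-1", response });
console.log(cost); // 130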
package/model-provider/openai/OpenAITranscriptionModel.d.ts
@@ -0,0 +1,196 @@
+ /// <reference types="node" resolution-mode="require"/>
+ import z from "zod";
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { TranscriptionModel, TranscriptionModelSettings } from "../../model-function/transcribe-audio/TranscriptionModel.js";
+ import { RetryFunction } from "../../util/api/RetryFunction.js";
+ import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
+ import { ResponseHandler } from "../../util/api/postToApi.js";
+ import { OpenAIModelSettings } from "./OpenAIModelSettings.js";
+ /**
+ * @see https://openai.com/pricing
+ */
+ export declare const OPENAI_TRANSCRIPTION_MODELS: {
+ "whisper-1": {
+ costInMillicentsPerSecond: number;
+ };
+ };
+ export type OpenAITranscriptionModelType = keyof typeof OPENAI_TRANSCRIPTION_MODELS;
+ export declare const calculateOpenAITranscriptionCostInMillicents: ({ model, response, }: {
+ model: OpenAITranscriptionModelType;
+ response: OpenAITranscriptionVerboseJsonResponse;
+ }) => number | null;
+ export interface OpenAITranscriptionModelSettings extends TranscriptionModelSettings {
+ model: OpenAITranscriptionModelType;
+ baseUrl?: string;
+ apiKey?: string;
+ retry?: RetryFunction;
+ throttle?: ThrottleFunction;
+ }
+ export type OpenAITranscriptionInput = {
+ type: "mp3" | "mp4" | "mpeg" | "mpga" | "m3a" | "wav" | "webm";
+ data: Buffer;
+ };
+ /**
+ * Create a transcription model that calls the OpenAI transcription API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/audio/create
+ *
+ * @example
+ * const data = await fs.promises.readFile("data/test.mp3");
+ *
+ * const { transcription } = await transcribe(
+ * new OpenAITranscriptionModel({ model: "whisper-1" }),
+ * {
+ * type: "mp3",
+ * data,
+ * }
+ * );
+ */
+ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITranscriptionModelSettings> implements TranscriptionModel<OpenAITranscriptionInput, OpenAITranscriptionVerboseJsonResponse, OpenAITranscriptionModelSettings> {
+ constructor(settings: OpenAITranscriptionModelSettings);
+ readonly provider: "openai";
+ get modelName(): "whisper-1";
+ generateTranscriptionResponse(data: OpenAITranscriptionInput, options?: FunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): PromiseLike<OpenAITranscriptionVerboseJsonResponse>;
+ extractTranscriptionText(response: OpenAITranscriptionVerboseJsonResponse): string;
+ private get apiKey();
+ callAPI<RESULT>(data: OpenAITranscriptionInput, options: {
+ responseFormat: OpenAITranscriptionResponseFormatType<RESULT>;
+ } & FunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): Promise<RESULT>;
+ withSettings(additionalSettings: OpenAITranscriptionModelSettings): this;
+ }
+ declare const openAITranscriptionJsonSchema: z.ZodObject<{
+ text: z.ZodString;
+ }, "strip", z.ZodTypeAny, {
+ text: string;
+ }, {
+ text: string;
+ }>;
+ export type OpenAITranscriptionJsonResponse = z.infer<typeof openAITranscriptionJsonSchema>;
+ declare const openAITranscriptionVerboseJsonSchema: z.ZodObject<{
+ task: z.ZodLiteral<"transcribe">;
+ language: z.ZodString;
+ duration: z.ZodNumber;
+ segments: z.ZodArray<z.ZodObject<{
+ id: z.ZodNumber;
+ seek: z.ZodNumber;
+ start: z.ZodNumber;
+ end: z.ZodNumber;
+ text: z.ZodString;
+ tokens: z.ZodArray<z.ZodNumber, "many">;
+ temperature: z.ZodNumber;
+ avg_logprob: z.ZodNumber;
+ compression_ratio: z.ZodNumber;
+ no_speech_prob: z.ZodNumber;
+ transient: z.ZodOptional<z.ZodBoolean>;
+ }, "strip", z.ZodTypeAny, {
+ text: string;
+ temperature: number;
+ id: number;
+ tokens: number[];
+ seek: number;
+ start: number;
+ end: number;
+ avg_logprob: number;
+ compression_ratio: number;
+ no_speech_prob: number;
+ transient?: boolean | undefined;
+ }, {
+ text: string;
+ temperature: number;
+ id: number;
+ tokens: number[];
+ seek: number;
+ start: number;
+ end: number;
+ avg_logprob: number;
+ compression_ratio: number;
+ no_speech_prob: number;
+ transient?: boolean | undefined;
+ }>, "many">;
+ text: z.ZodString;
+ }, "strip", z.ZodTypeAny, {
+ text: string;
+ segments: {
+ text: string;
+ temperature: number;
+ id: number;
+ tokens: number[];
+ seek: number;
+ start: number;
+ end: number;
+ avg_logprob: number;
+ compression_ratio: number;
+ no_speech_prob: number;
+ transient?: boolean | undefined;
+ }[];
+ task: "transcribe";
+ language: string;
+ duration: number;
+ }, {
+ text: string;
+ segments: {
+ text: string;
+ temperature: number;
+ id: number;
+ tokens: number[];
+ seek: number;
+ start: number;
+ end: number;
+ avg_logprob: number;
+ compression_ratio: number;
+ no_speech_prob: number;
+ transient?: boolean | undefined;
+ }[];
+ task: "transcribe";
+ language: string;
+ duration: number;
+ }>;
+ export type OpenAITranscriptionVerboseJsonResponse = z.infer<typeof openAITranscriptionVerboseJsonSchema>;
+ export type OpenAITranscriptionResponseFormatType<T> = {
+ type: "json" | "text" | "srt" | "verbose_json" | "vtt";
+ handler: ResponseHandler<T>;
+ };
+ export declare const OpenAITranscriptionResponseFormat: {
+ json: {
+ type: "json";
+ handler: ResponseHandler<{
+ text: string;
+ }>;
+ };
+ verboseJson: {
+ type: "verbose_json";
+ handler: ResponseHandler<{
+ text: string;
+ segments: {
+ text: string;
+ temperature: number;
+ id: number;
+ tokens: number[];
+ seek: number;
+ start: number;
+ end: number;
+ avg_logprob: number;
+ compression_ratio: number;
+ no_speech_prob: number;
+ transient?: boolean | undefined;
+ }[];
+ task: "transcribe";
+ language: string;
+ duration: number;
+ }>;
+ };
+ text: {
+ type: "text";
+ handler: ResponseHandler<string>;
+ };
+ srt: {
+ type: "srt";
+ handler: ResponseHandler<string>;
+ };
+ vtt: {
+ type: "vtt";
+ handler: ResponseHandler<string>;
+ };
+ };
+ export {};
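The declaration file ties the return type of callAPI to the chosen response format: RESULT is inferred from the handler of the OpenAITranscriptionResponseFormatType passed in. A type-level sketch, assuming the same root re-exports as above; it only illustrates the inference and is not part of the package:

// Sketch only: import path assumed; the types come from the .d.ts above.
import fs from "node:fs";
import { OpenAITranscriptionModel, OpenAITranscriptionResponseFormat } from "modelfusion";

const model = new OpenAITranscriptionModel({ model: "whisper-1" });
const data = await fs.promises.readFile("data/test.mp3");

// ResponseHandler<string> => the call resolves to a plain SRT string.
const srt = await model.callAPI({ type: "mp3", data }, {
  responseFormat: OpenAITranscriptionResponseFormat.srt,
});

// ResponseHandler<verbose json> => duration, segments and text are fully typed.
const verbose = await model.callAPI({ type: "mp3", data }, {
  responseFormat: OpenAITranscriptionResponseFormat.verboseJson,
});
console.log(srt.length, verbose.duration);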