modelfusion 0.0.44

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (405) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +429 -0
  3. package/composed-function/index.cjs +22 -0
  4. package/composed-function/index.d.ts +6 -0
  5. package/composed-function/index.js +6 -0
  6. package/composed-function/summarize/SummarizationFunction.cjs +2 -0
  7. package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
  8. package/composed-function/summarize/SummarizationFunction.js +1 -0
  9. package/composed-function/summarize/summarizeRecursively.cjs +19 -0
  10. package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
  11. package/composed-function/summarize/summarizeRecursively.js +15 -0
  12. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
  13. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
  14. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
  15. package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
  16. package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
  17. package/composed-function/use-tool/NoSuchToolError.js +13 -0
  18. package/composed-function/use-tool/Tool.cjs +43 -0
  19. package/composed-function/use-tool/Tool.d.ts +15 -0
  20. package/composed-function/use-tool/Tool.js +39 -0
  21. package/composed-function/use-tool/useTool.cjs +59 -0
  22. package/composed-function/use-tool/useTool.d.ts +36 -0
  23. package/composed-function/use-tool/useTool.js +54 -0
  24. package/cost/Cost.cjs +38 -0
  25. package/cost/Cost.d.ts +16 -0
  26. package/cost/Cost.js +34 -0
  27. package/cost/CostCalculator.cjs +2 -0
  28. package/cost/CostCalculator.d.ts +8 -0
  29. package/cost/CostCalculator.js +1 -0
  30. package/cost/calculateCost.cjs +28 -0
  31. package/cost/calculateCost.d.ts +7 -0
  32. package/cost/calculateCost.js +24 -0
  33. package/cost/index.cjs +19 -0
  34. package/cost/index.d.ts +3 -0
  35. package/cost/index.js +3 -0
  36. package/index.cjs +25 -0
  37. package/index.d.ts +9 -0
  38. package/index.js +9 -0
  39. package/model-function/AbstractModel.cjs +22 -0
  40. package/model-function/AbstractModel.d.ts +12 -0
  41. package/model-function/AbstractModel.js +18 -0
  42. package/model-function/FunctionOptions.cjs +2 -0
  43. package/model-function/FunctionOptions.d.ts +6 -0
  44. package/model-function/FunctionOptions.js +1 -0
  45. package/model-function/Model.cjs +2 -0
  46. package/model-function/Model.d.ts +23 -0
  47. package/model-function/Model.js +1 -0
  48. package/model-function/ModelCallEvent.cjs +2 -0
  49. package/model-function/ModelCallEvent.d.ts +18 -0
  50. package/model-function/ModelCallEvent.js +1 -0
  51. package/model-function/ModelCallEventSource.cjs +42 -0
  52. package/model-function/ModelCallEventSource.d.ts +13 -0
  53. package/model-function/ModelCallEventSource.js +38 -0
  54. package/model-function/ModelCallObserver.cjs +2 -0
  55. package/model-function/ModelCallObserver.d.ts +5 -0
  56. package/model-function/ModelCallObserver.js +1 -0
  57. package/model-function/ModelInformation.cjs +2 -0
  58. package/model-function/ModelInformation.d.ts +4 -0
  59. package/model-function/ModelInformation.js +1 -0
  60. package/model-function/SuccessfulModelCall.cjs +22 -0
  61. package/model-function/SuccessfulModelCall.d.ts +9 -0
  62. package/model-function/SuccessfulModelCall.js +18 -0
  63. package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
  64. package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
  65. package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
  66. package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
  67. package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
  68. package/model-function/embed-text/TextEmbeddingModel.js +1 -0
  69. package/model-function/embed-text/embedText.cjs +90 -0
  70. package/model-function/embed-text/embedText.d.ts +33 -0
  71. package/model-function/embed-text/embedText.js +85 -0
  72. package/model-function/executeCall.cjs +60 -0
  73. package/model-function/executeCall.d.ts +27 -0
  74. package/model-function/executeCall.js +56 -0
  75. package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
  76. package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
  77. package/model-function/generate-image/ImageGenerationEvent.js +1 -0
  78. package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
  79. package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
  80. package/model-function/generate-image/ImageGenerationModel.js +1 -0
  81. package/model-function/generate-image/generateImage.cjs +63 -0
  82. package/model-function/generate-image/generateImage.d.ts +23 -0
  83. package/model-function/generate-image/generateImage.js +59 -0
  84. package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
  85. package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
  86. package/model-function/generate-json/GenerateJsonModel.js +1 -0
  87. package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
  88. package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
  89. package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
  90. package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
  91. package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
  92. package/model-function/generate-json/JsonGenerationEvent.js +1 -0
  93. package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
  94. package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
  95. package/model-function/generate-json/NoSuchSchemaError.js +13 -0
  96. package/model-function/generate-json/SchemaDefinition.cjs +2 -0
  97. package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
  98. package/model-function/generate-json/SchemaDefinition.js +1 -0
  99. package/model-function/generate-json/SchemaValidationError.cjs +36 -0
  100. package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
  101. package/model-function/generate-json/SchemaValidationError.js +32 -0
  102. package/model-function/generate-json/generateJson.cjs +61 -0
  103. package/model-function/generate-json/generateJson.d.ts +9 -0
  104. package/model-function/generate-json/generateJson.js +57 -0
  105. package/model-function/generate-json/generateJsonOrText.cjs +74 -0
  106. package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
  107. package/model-function/generate-json/generateJsonOrText.js +70 -0
  108. package/model-function/generate-text/AsyncQueue.cjs +66 -0
  109. package/model-function/generate-text/AsyncQueue.d.ts +17 -0
  110. package/model-function/generate-text/AsyncQueue.js +62 -0
  111. package/model-function/generate-text/DeltaEvent.cjs +2 -0
  112. package/model-function/generate-text/DeltaEvent.d.ts +7 -0
  113. package/model-function/generate-text/DeltaEvent.js +1 -0
  114. package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
  115. package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
  116. package/model-function/generate-text/TextDeltaEventSource.js +46 -0
  117. package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
  118. package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
  119. package/model-function/generate-text/TextGenerationEvent.js +1 -0
  120. package/model-function/generate-text/TextGenerationModel.cjs +2 -0
  121. package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
  122. package/model-function/generate-text/TextGenerationModel.js +1 -0
  123. package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
  124. package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
  125. package/model-function/generate-text/TextStreamingEvent.js +1 -0
  126. package/model-function/generate-text/extractTextDeltas.cjs +23 -0
  127. package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
  128. package/model-function/generate-text/extractTextDeltas.js +19 -0
  129. package/model-function/generate-text/generateText.cjs +67 -0
  130. package/model-function/generate-text/generateText.d.ts +20 -0
  131. package/model-function/generate-text/generateText.js +63 -0
  132. package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
  133. package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
  134. package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
  135. package/model-function/generate-text/streamText.cjs +115 -0
  136. package/model-function/generate-text/streamText.d.ts +11 -0
  137. package/model-function/generate-text/streamText.js +111 -0
  138. package/model-function/index.cjs +47 -0
  139. package/model-function/index.d.ts +31 -0
  140. package/model-function/index.js +31 -0
  141. package/model-function/tokenize-text/Tokenizer.cjs +2 -0
  142. package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
  143. package/model-function/tokenize-text/Tokenizer.js +1 -0
  144. package/model-function/tokenize-text/countTokens.cjs +10 -0
  145. package/model-function/tokenize-text/countTokens.d.ts +5 -0
  146. package/model-function/tokenize-text/countTokens.js +6 -0
  147. package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
  148. package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
  149. package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
  150. package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
  151. package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
  152. package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
  153. package/model-function/transcribe-audio/transcribe.cjs +62 -0
  154. package/model-function/transcribe-audio/transcribe.d.ts +22 -0
  155. package/model-function/transcribe-audio/transcribe.js +58 -0
  156. package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
  157. package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
  158. package/model-provider/automatic1111/Automatic1111Error.js +31 -0
  159. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
  160. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
  161. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
  162. package/model-provider/automatic1111/index.cjs +20 -0
  163. package/model-provider/automatic1111/index.d.ts +2 -0
  164. package/model-provider/automatic1111/index.js +2 -0
  165. package/model-provider/cohere/CohereError.cjs +36 -0
  166. package/model-provider/cohere/CohereError.d.ts +22 -0
  167. package/model-provider/cohere/CohereError.js +28 -0
  168. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
  169. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
  170. package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
  171. package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
  172. package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
  173. package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
  174. package/model-provider/cohere/CohereTokenizer.cjs +136 -0
  175. package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
  176. package/model-provider/cohere/CohereTokenizer.js +129 -0
  177. package/model-provider/cohere/index.cjs +22 -0
  178. package/model-provider/cohere/index.d.ts +4 -0
  179. package/model-provider/cohere/index.js +4 -0
  180. package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
  181. package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
  182. package/model-provider/huggingface/HuggingFaceError.js +44 -0
  183. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
  184. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
  185. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
  186. package/model-provider/huggingface/index.cjs +20 -0
  187. package/model-provider/huggingface/index.d.ts +2 -0
  188. package/model-provider/huggingface/index.js +2 -0
  189. package/model-provider/index.cjs +22 -0
  190. package/model-provider/index.d.ts +6 -0
  191. package/model-provider/index.js +6 -0
  192. package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
  193. package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
  194. package/model-provider/llamacpp/LlamaCppError.js +44 -0
  195. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
  196. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
  197. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
  198. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
  199. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
  200. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
  201. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
  202. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
  203. package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
  204. package/model-provider/llamacpp/index.cjs +22 -0
  205. package/model-provider/llamacpp/index.d.ts +4 -0
  206. package/model-provider/llamacpp/index.js +4 -0
  207. package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
  208. package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
  209. package/model-provider/openai/OpenAICostCalculator.js +67 -0
  210. package/model-provider/openai/OpenAIError.cjs +50 -0
  211. package/model-provider/openai/OpenAIError.d.ts +47 -0
  212. package/model-provider/openai/OpenAIError.js +42 -0
  213. package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
  214. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
  215. package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
  216. package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
  217. package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
  218. package/model-provider/openai/OpenAIModelSettings.js +1 -0
  219. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
  220. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
  221. package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
  222. package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
  223. package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
  224. package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
  225. package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
  226. package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
  227. package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
  228. package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
  229. package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
  230. package/model-provider/openai/TikTokenTokenizer.js +82 -0
  231. package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
  232. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
  233. package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
  234. package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
  235. package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
  236. package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
  237. package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
  238. package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
  239. package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
  240. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
  241. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
  242. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
  243. package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
  244. package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
  245. package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
  246. package/model-provider/openai/index.cjs +31 -0
  247. package/model-provider/openai/index.d.ts +13 -0
  248. package/model-provider/openai/index.js +12 -0
  249. package/model-provider/stability/StabilityError.cjs +36 -0
  250. package/model-provider/stability/StabilityError.d.ts +22 -0
  251. package/model-provider/stability/StabilityError.js +28 -0
  252. package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
  253. package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
  254. package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
  255. package/model-provider/stability/index.cjs +20 -0
  256. package/model-provider/stability/index.d.ts +2 -0
  257. package/model-provider/stability/index.js +2 -0
  258. package/package.json +87 -0
  259. package/prompt/InstructionPrompt.cjs +2 -0
  260. package/prompt/InstructionPrompt.d.ts +7 -0
  261. package/prompt/InstructionPrompt.js +1 -0
  262. package/prompt/Llama2PromptMapping.cjs +56 -0
  263. package/prompt/Llama2PromptMapping.d.ts +10 -0
  264. package/prompt/Llama2PromptMapping.js +51 -0
  265. package/prompt/OpenAIChatPromptMapping.cjs +62 -0
  266. package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
  267. package/prompt/OpenAIChatPromptMapping.js +57 -0
  268. package/prompt/PromptMapping.cjs +2 -0
  269. package/prompt/PromptMapping.d.ts +7 -0
  270. package/prompt/PromptMapping.js +1 -0
  271. package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
  272. package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
  273. package/prompt/PromptMappingTextGenerationModel.js +84 -0
  274. package/prompt/TextPromptMapping.cjs +50 -0
  275. package/prompt/TextPromptMapping.d.ts +14 -0
  276. package/prompt/TextPromptMapping.js +45 -0
  277. package/prompt/chat/ChatPrompt.cjs +2 -0
  278. package/prompt/chat/ChatPrompt.d.ts +33 -0
  279. package/prompt/chat/ChatPrompt.js +1 -0
  280. package/prompt/chat/trimChatPrompt.cjs +50 -0
  281. package/prompt/chat/trimChatPrompt.d.ts +19 -0
  282. package/prompt/chat/trimChatPrompt.js +46 -0
  283. package/prompt/chat/validateChatPrompt.cjs +36 -0
  284. package/prompt/chat/validateChatPrompt.d.ts +8 -0
  285. package/prompt/chat/validateChatPrompt.js +31 -0
  286. package/prompt/index.cjs +25 -0
  287. package/prompt/index.d.ts +9 -0
  288. package/prompt/index.js +9 -0
  289. package/run/ConsoleLogger.cjs +12 -0
  290. package/run/ConsoleLogger.d.ts +6 -0
  291. package/run/ConsoleLogger.js +8 -0
  292. package/run/DefaultRun.cjs +78 -0
  293. package/run/DefaultRun.d.ts +24 -0
  294. package/run/DefaultRun.js +74 -0
  295. package/run/IdMetadata.cjs +2 -0
  296. package/run/IdMetadata.d.ts +7 -0
  297. package/run/IdMetadata.js +1 -0
  298. package/run/Run.cjs +2 -0
  299. package/run/Run.d.ts +27 -0
  300. package/run/Run.js +1 -0
  301. package/run/RunFunction.cjs +2 -0
  302. package/run/RunFunction.d.ts +13 -0
  303. package/run/RunFunction.js +1 -0
  304. package/run/Vector.cjs +2 -0
  305. package/run/Vector.d.ts +5 -0
  306. package/run/Vector.js +1 -0
  307. package/run/index.cjs +22 -0
  308. package/run/index.d.ts +6 -0
  309. package/run/index.js +6 -0
  310. package/text-chunk/TextChunk.cjs +2 -0
  311. package/text-chunk/TextChunk.d.ts +3 -0
  312. package/text-chunk/TextChunk.js +1 -0
  313. package/text-chunk/index.cjs +22 -0
  314. package/text-chunk/index.d.ts +6 -0
  315. package/text-chunk/index.js +6 -0
  316. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
  317. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
  318. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
  319. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
  320. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
  321. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
  322. package/text-chunk/split/SplitFunction.cjs +2 -0
  323. package/text-chunk/split/SplitFunction.d.ts +4 -0
  324. package/text-chunk/split/SplitFunction.js +1 -0
  325. package/text-chunk/split/splitOnSeparator.cjs +12 -0
  326. package/text-chunk/split/splitOnSeparator.d.ts +8 -0
  327. package/text-chunk/split/splitOnSeparator.js +7 -0
  328. package/text-chunk/split/splitRecursively.cjs +41 -0
  329. package/text-chunk/split/splitRecursively.d.ts +22 -0
  330. package/text-chunk/split/splitRecursively.js +33 -0
  331. package/util/DurationMeasurement.cjs +42 -0
  332. package/util/DurationMeasurement.d.ts +5 -0
  333. package/util/DurationMeasurement.js +38 -0
  334. package/util/ErrorHandler.cjs +2 -0
  335. package/util/ErrorHandler.d.ts +1 -0
  336. package/util/ErrorHandler.js +1 -0
  337. package/util/SafeResult.cjs +2 -0
  338. package/util/SafeResult.d.ts +8 -0
  339. package/util/SafeResult.js +1 -0
  340. package/util/api/AbortError.cjs +9 -0
  341. package/util/api/AbortError.d.ts +3 -0
  342. package/util/api/AbortError.js +5 -0
  343. package/util/api/ApiCallError.cjs +45 -0
  344. package/util/api/ApiCallError.d.ts +15 -0
  345. package/util/api/ApiCallError.js +41 -0
  346. package/util/api/RetryError.cjs +24 -0
  347. package/util/api/RetryError.d.ts +10 -0
  348. package/util/api/RetryError.js +20 -0
  349. package/util/api/RetryFunction.cjs +2 -0
  350. package/util/api/RetryFunction.d.ts +1 -0
  351. package/util/api/RetryFunction.js +1 -0
  352. package/util/api/ThrottleFunction.cjs +2 -0
  353. package/util/api/ThrottleFunction.d.ts +1 -0
  354. package/util/api/ThrottleFunction.js +1 -0
  355. package/util/api/callWithRetryAndThrottle.cjs +7 -0
  356. package/util/api/callWithRetryAndThrottle.d.ts +7 -0
  357. package/util/api/callWithRetryAndThrottle.js +3 -0
  358. package/util/api/postToApi.cjs +103 -0
  359. package/util/api/postToApi.d.ts +29 -0
  360. package/util/api/postToApi.js +96 -0
  361. package/util/api/retryNever.cjs +8 -0
  362. package/util/api/retryNever.d.ts +4 -0
  363. package/util/api/retryNever.js +4 -0
  364. package/util/api/retryWithExponentialBackoff.cjs +48 -0
  365. package/util/api/retryWithExponentialBackoff.d.ts +10 -0
  366. package/util/api/retryWithExponentialBackoff.js +44 -0
  367. package/util/api/throttleMaxConcurrency.cjs +65 -0
  368. package/util/api/throttleMaxConcurrency.d.ts +7 -0
  369. package/util/api/throttleMaxConcurrency.js +61 -0
  370. package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
  371. package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
  372. package/util/api/throttleUnlimitedConcurrency.js +4 -0
  373. package/util/cosineSimilarity.cjs +26 -0
  374. package/util/cosineSimilarity.d.ts +11 -0
  375. package/util/cosineSimilarity.js +22 -0
  376. package/util/index.cjs +26 -0
  377. package/util/index.d.ts +10 -0
  378. package/util/index.js +10 -0
  379. package/util/never.cjs +6 -0
  380. package/util/never.d.ts +1 -0
  381. package/util/never.js +2 -0
  382. package/util/runSafe.cjs +15 -0
  383. package/util/runSafe.d.ts +2 -0
  384. package/util/runSafe.js +11 -0
  385. package/vector-index/VectorIndex.cjs +2 -0
  386. package/vector-index/VectorIndex.d.ts +18 -0
  387. package/vector-index/VectorIndex.js +1 -0
  388. package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
  389. package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
  390. package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
  391. package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
  392. package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
  393. package/vector-index/VectorIndexTextChunkStore.js +73 -0
  394. package/vector-index/index.cjs +22 -0
  395. package/vector-index/index.d.ts +6 -0
  396. package/vector-index/index.js +6 -0
  397. package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
  398. package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
  399. package/vector-index/memory/MemoryVectorIndex.js +56 -0
  400. package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
  401. package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
  402. package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
  403. package/vector-index/upsertTextChunks.cjs +15 -0
  404. package/vector-index/upsertTextChunks.d.ts +11 -0
  405. package/vector-index/upsertTextChunks.js +11 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2023 Lars Grammel
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,429 @@
1
+ # ai-utils.js
2
+
3
+ > ### Build AI applications, chatbots, and agents with JavaScript and TypeScript.
4
+
5
+ [![Created by Lars Grammel](https://img.shields.io/badge/created%20by-@lgrammel-4BBAAB.svg)](https://twitter.com/lgrammel)
6
+ [![NPM Version](https://img.shields.io/npm/v/ai-utils.js?color=33cd56&logo=npm)](https://www.npmjs.com/package/ai-utils.js)
7
+ [![MIT License](https://img.shields.io/github/license/lgrammel/ai-utils.js)](https://opensource.org/licenses/MIT)
8
+
9
+ [Introduction](#introduction) | [Quick Install](#quick-install) | [Usage](#usage-examples) | [Features](#features) | [Integrations](#integrations) | [Documentation](#documentation) | [Examples](#more-examples) | [ai-utils.dev](https://ai-utils.dev)
10
+
11
+ ## Disclaimer
12
+
13
+ `ai-utils.js` is currently in its initial development phase. **Until version 0.1 there may be frequent breaking changes.**
14
+
15
+ ## Introduction
16
+
17
+ `ai-utils.js` is a library for building AI apps, chatbots, and agents. It provides abstractions for working with AI models, vector indices, and tools. It was designed with the following goals in mind:
18
+
19
+ - **Provide type inference and validation**: `ai-utils.js` uses TypeScript and [Zod](https://github.com/colinhacks/zod) to infer types wherever possible and to validate AI responses.
20
+ - **Flexibility and control**: AI application development can be complex and unique to each project. With `ai-utils.js`, you have complete control over the prompts, the model settings, and the control flow of your application. You can also access the full responses from the models and metadata easily to build what you need.
21
+ - **Integrate support features**: Essential features like logging, retries, throttling, and error handling are integrated and easily configurable.
22
+
23
+ ## Quick Install
24
+
25
+ ```sh
26
+ npm install ai-utils.js
27
+ ```
28
+
29
+ You need to install `zod` and a matching version of `zod-to-json-schema` (peer dependencies):
30
+
31
+ ```sh
32
+ npm install zod zod-to-json-schema
33
+ ```
34
+
35
+ ## Usage Examples
36
+
37
+ You can provide API keys for the different [integrations](https://ai-utils.dev/integration/model-provider/) using environment variables (e.g., `OPENAI_API_KEY`) or pass them into the model constructors as options.
38
+
39
+ ### [Generate Text](https://ai-utils.dev/guide/function/generate-text)
40
+
41
+ Generate text using a language model and a prompt.
42
+ You can stream the text if it is supported by the model.
43
+ You can use [prompt mappings](https://ai-utils.dev/guide/function/generate-text/prompt-mapping) to change the prompt format of a model.
44
+
45
+ #### generateText
46
+
47
+ ```ts
48
+ const { text } = await generateText(
49
+ new OpenAITextGenerationModel({ model: "text-davinci-003" }),
50
+ "Write a short story about a robot learning to love:\n\n"
51
+ );
52
+ ```
53
+
54
+ #### streamText
55
+
56
+ ```ts
57
+ const { textStream } = await streamText(
58
+ new OpenAIChatModel({ model: "gpt-3.5-turbo", maxTokens: 1000 }),
59
+ [
60
+ OpenAIChatMessage.system("You are a story writer."),
61
+ OpenAIChatMessage.user("Write a story about a robot learning to love"),
62
+ ]
63
+ );
64
+
65
+ for await (const textFragment of textStream) {
66
+ process.stdout.write(textFragment);
67
+ }
68
+ ```
69
+
70
+ #### Prompt Mapping
71
+
72
+ [Prompt mapping](https://ai-utils.dev/guide/function/generate-text/prompt-mapping) lets you use higher level prompt structures (such as instruction or chat prompts) for different models.
73
+
74
+ ```ts
75
+ const { text } = await generateText(
76
+ new LlamaCppTextGenerationModel({
77
+ contextWindowSize: 4096, // Llama 2 context window size
78
+ nPredict: 1000,
79
+ }).mapPrompt(InstructionToLlama2PromptMapping()),
80
+ {
81
+ system: "You are a story writer.",
82
+ instruction: "Write a short story about a robot learning to love.",
83
+ }
84
+ );
85
+ ```
86
+
87
+ ```ts
88
+ const { textStream } = await streamText(
89
+ new OpenAIChatModel({
90
+ model: "gpt-3.5-turbo",
91
+ }).mapPrompt(ChatToOpenAIChatPromptMapping()),
92
+ [
93
+ { system: "You are a celebrated poet." },
94
+ { user: "Write a short story about a robot learning to love." },
95
+ { ai: "Once upon a time, there was a robot who learned to love." },
96
+ { user: "That's a great start!" },
97
+ ]
98
+ );
99
+ ```
100
+
101
+ #### Metadata and original responses
102
+
103
+ Most `ai-utils.js` model functions return rich results that include the original response and metadata.
104
+
105
+ ```ts
106
+ const { text, response, metadata } = await generateText(
107
+ new OpenAITextGenerationModel({
108
+ model: "text-davinci-003",
109
+ }),
110
+ "Write a short story about a robot learning to love:\n\n"
111
+ );
112
+ ```
113
+
114
+ ### [Generate JSON](https://ai-utils.dev/guide/function/generate-json)
115
+
116
+ Generate JSON value that matches a schema.
117
+
118
+ ```ts
119
+ const { value } = await generateJson(
120
+ new OpenAIChatModel({
121
+ model: "gpt-3.5-turbo",
122
+ temperature: 0,
123
+ maxTokens: 50,
124
+ }),
125
+ {
126
+ name: "sentiment" as const,
127
+ description: "Write the sentiment analysis",
128
+ schema: z.object({
129
+ sentiment: z
130
+ .enum(["positive", "neutral", "negative"])
131
+ .describe("Sentiment."),
132
+ }),
133
+ },
134
+ OpenAIChatFunctionPrompt.forSchemaCurried([
135
+ OpenAIChatMessage.system(
136
+ "You are a sentiment evaluator. " +
137
+ "Analyze the sentiment of the following product review:"
138
+ ),
139
+ OpenAIChatMessage.user(
140
+ "After I opened the package, I was met by a very unpleasant smell " +
141
+ "that did not disappear even after washing. Never again!"
142
+ ),
143
+ ])
144
+ );
145
+ ```
146
+
147
+ ### [Generate JSON or Text](https://ai-utils.dev/guide/function/generate-json-or-text)
148
+
149
+ Generate JSON (or text as a fallback) using a prompt and multiple schemas.
150
+ It either matches one of the schemas or is a text response.
151
+
152
+ ```ts
153
+ const { schema, value, text } = await generateJsonOrText(
154
+ new OpenAIChatModel({ model: "gpt-3.5-turbo", maxTokens: 1000 }),
155
+ [
156
+ {
157
+ name: "getCurrentWeather" as const, // mark 'as const' for type inference
158
+ description: "Get the current weather in a given location",
159
+ schema: z.object({
160
+ location: z
161
+ .string()
162
+ .describe("The city and state, e.g. San Francisco, CA"),
163
+ unit: z.enum(["celsius", "fahrenheit"]).optional(),
164
+ }),
165
+ },
166
+ {
167
+ name: "getContactInformation" as const,
168
+ description: "Get the contact information for a given person",
169
+ schema: z.object({
170
+ name: z.string().describe("The name of the person"),
171
+ }),
172
+ },
173
+ ],
174
+ OpenAIChatFunctionPrompt.forSchemasCurried([OpenAIChatMessage.user(query)])
175
+ );
176
+ ```
177
+
178
+ ### [Tools](https://ai-utils.dev/guide/tools)
179
+
180
+ Tools are functions that can be executed by an AI model. They are useful for building chatbots and agents.
181
+
182
+ #### Create Tool
183
+
184
+ A tool is a function with a name, a description, and a schema for the input parameters.
185
+
186
+ ```ts
187
+ const calculator = new Tool({
188
+ name: "calculator" as const, // mark 'as const' for type inference
189
+ description: "Execute a calculation",
190
+
191
+ inputSchema: z.object({
192
+ a: z.number().describe("The first number."),
193
+ b: z.number().describe("The second number."),
194
+ operator: z.enum(["+", "-", "*", "/"]).describe("The operator."),
195
+ }),
196
+
197
+ execute: async ({ a, b, operator }) => {
198
+ switch (operator) {
199
+ case "+":
200
+ return a + b;
201
+ case "-":
202
+ return a - b;
203
+ case "*":
204
+ return a * b;
205
+ case "/":
206
+ return a / b;
207
+ default:
208
+ throw new Error(`Unknown operator: ${operator}`);
209
+ }
210
+ },
211
+ });
212
+ ```
213
+
214
+ #### useTool
215
+
216
+ The model determines the parameters for the tool from the prompt and then executes it.
217
+
218
+ ```ts
219
+ const { tool, parameters, result } = await useTool(
220
+ new OpenAIChatModel({ model: "gpt-3.5-turbo" }),
221
+ calculator,
222
+ OpenAIChatFunctionPrompt.forToolCurried([
223
+ OpenAIChatMessage.user("What's fourteen times twelve?"),
224
+ ])
225
+ );
226
+ ```
227
+
228
+ #### useToolOrGenerateText
229
+
230
+ The model determines which tool to use and its parameters from the prompt and then executes it.
231
+ Text is generated as a fallback.
232
+
233
+ ```ts
234
+ const { tool, parameters, result, text } = await useToolOrGenerateText(
235
+ new OpenAIChatModel({ model: "gpt-3.5-turbo" }),
236
+ [calculator /* ... */],
237
+ OpenAIChatFunctionPrompt.forToolsCurried([
238
+ OpenAIChatMessage.user("What's fourteen times twelve?"),
239
+ ])
240
+ );
241
+ ```
242
+
243
+ ### [Transcribe Audio](https://ai-utils.dev/guide/function/transcribe-audio)
244
+
245
+ Turn audio (voice) into text.
246
+
247
+ ```ts
248
+ const { transcription } = await transcribe(
249
+ new OpenAITranscriptionModel({ model: "whisper-1" }),
250
+ {
251
+ type: "mp3",
252
+ data: await fs.promises.readFile("data/test.mp3"),
253
+ }
254
+ );
255
+ ```
256
+
257
+ ### [Generate Image](https://ai-utils.dev/guide/function/generate-image)
258
+
259
+ Generate a base64-encoded image from a prompt.
260
+
261
+ ```ts
262
+ const { image } = await generateImage(
263
+ new OpenAIImageGenerationModel({ size: "512x512" }),
264
+ "the wicked witch of the west in the style of early 19th century painting"
265
+ );
266
+ ```
267
+
268
+ ### [Embed Text](https://ai-utils.dev/guide/function/embed-text)
269
+
270
+ Create embeddings for text. Embeddings are vectors that represent the meaning of the text.
271
+
272
+ ```ts
273
+ const { embeddings } = await embedTexts(
274
+ new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
275
+ [
276
+ "At first, Nox didn't know what to do with the pup.",
277
+ "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
278
+ ]
279
+ );
280
+ ```
281
+
282
+ ### [Tokenize Text](https://ai-utils.dev/guide/function/tokenize-text)
283
+
284
+ Split text into tokens and reconstruct the text from tokens.
285
+
286
+ ```ts
287
+ const tokenizer = new TikTokenTokenizer({ model: "gpt-4" });
288
+
289
+ const text = "At first, Nox didn't know what to do with the pup.";
290
+
291
+ const tokenCount = await countTokens(tokenizer, text);
292
+
293
+ const tokens = await tokenizer.tokenize(text);
294
+ const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
295
+ const reconstructedText = await tokenizer.detokenize(tokens);
296
+ ```
297
+
298
+ ### [Upserting and Retrieving Text Chunks from Vector Indices](https://ai-utils.dev/guide/text-chunks)
299
+
300
+ ```ts
301
+ const texts = [
302
+ "A rainbow is an optical phenomenon that can occur under certain meteorological conditions.",
303
+ "It is caused by refraction, internal reflection and dispersion of light in water droplets resulting in a continuous spectrum of light appearing in the sky.",
304
+ // ...
305
+ ];
306
+
307
+ const vectorIndex = new MemoryVectorIndex<TextChunk>();
308
+ const embeddingModel = new OpenAITextEmbeddingModel({
309
+ model: "text-embedding-ada-002",
310
+ });
311
+
312
+ // update an index - usually done as part of an ingestion process:
313
+ await upsertTextChunks({
314
+ vectorIndex,
315
+ embeddingModel,
316
+ chunks: texts.map((text) => ({ content: text })),
317
+ });
318
+
319
+ // retrieve text chunks from the vector index - usually done at query time:
320
+ const { chunks } = await retrieveTextChunks(
321
+ new VectorIndexSimilarTextChunkRetriever({
322
+ vectorIndex,
323
+ embeddingModel,
324
+ maxResults: 3,
325
+ similarityThreshold: 0.8,
326
+ }),
327
+ "rainbow and water droplets"
328
+ );
329
+ ```
330
+
331
+ ## Features
332
+
333
+ - [Model Functions](https://ai-utils.dev/guide/function/)
334
+ - [Generate and stream text](https://ai-utils.dev/guide/function/generate-text)
335
+ - [Generate JSON](https://ai-utils.dev/guide/function/generate-json)
336
+ - [Generate JSON or text](https://ai-utils.dev/guide/function/generate-json-or-text)
337
+ - [Embed Text](https://ai-utils.dev/guide/function/embed-text)
338
+ - [Tokenize Text](https://ai-utils.dev/guide/function/tokenize-text)
339
+ - [Transcribe Audio](https://ai-utils.dev/guide/function/transcribe-audio)
340
+ - [Generate images](https://ai-utils.dev/guide/function/generate-image)
341
+ - Summarize text
342
+ - Split text
343
+ - [Tools](https://ai-utils.dev/guide/tools)
344
+ - [Text Chunks](https://ai-utils.dev/guide/text-chunks)
345
+ - [Run abstraction](https://ai-utils.dev/guide/run/)
346
+ - [Abort signals](https://ai-utils.dev/guide/run/abort)
347
+ - [Cost calculation](https://ai-utils.dev/guide/run/cost-calculation)
348
+ - Call recording
349
+ - Utilities
350
+ - [Retry strategies](https://ai-utils.dev/guide/util/retry)
351
+ - [Throttling strategies](https://ai-utils.dev/guide/util/throttle)
352
+ - Error handling
353
+
354
+ ## Integrations
355
+
356
+ ### Model Providers
357
+
358
+ | | [OpenAI](https://ai-utils.dev/integration/model-provider/openai) | [Cohere](https://ai-utils.dev/integration/model-provider/cohere) | [Llama.cpp](https://ai-utils.dev/integration/model-provider/llamacpp) | [Hugging Face](https://ai-utils.dev/integration/model-provider/huggingface) | [Stability AI](https://ai-utils.dev/integration/model-provider/stability) | [Automatic1111](https://ai-utils.dev/integration/model-provider/automatic1111) |
359
+ | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | --------------------------------------------------------------------- | --------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------------ |
360
+ | Hosting | cloud | cloud | server (local) | cloud | cloud | server (local) |
361
+ | [Generate text](https://ai-utils.dev/guide/function/generate-text) | ✅ | ✅ | ✅ | ✅ | | |
362
+ | [Stream text](https://ai-utils.dev/guide/function/generate-text) | ✅ | ✅ | ✅ | | | |
363
+ | [Generate JSON](https://ai-utils.dev/guide/function/generate-json) | chat models | | | | | |
364
+ | [Generate JSON or Text](https://ai-utils.dev/guide/function/generate-json-or-text) | chat models | | | | | |
365
+ | [Embed text](https://ai-utils.dev/guide/function/embed-text) | ✅ | ✅ | ✅ | | | |
366
+ | [Tokenize text](https://ai-utils.dev/guide/function/tokenize-text) | full | full | basic | | | |
367
+ | [Generate image](https://ai-utils.dev/guide/function/generate-image) | ✅ | | | | ✅ | ✅ |
368
+ | [Transcribe audio](https://ai-utils.dev/guide/function/transcribe-audio) | ✅ | | | | | |
369
+ | [Cost calculation](https://ai-utils.dev/guide/run/cost-calculation) | ✅ | | | | | |
370
+
371
+ ### Vector Indices
372
+
373
+ - [Memory](https://ai-utils.dev/integration/vector-index/memory)
374
+ - [Pinecone](https://ai-utils.dev/integration/vector-index/pinecone)
375
+
376
+ ## Documentation
377
+
378
+ - [Guide](https://ai-utils.dev/guide)
379
+ - [Examples & Tutorials](https://ai-utils.dev/tutorial)
380
+ - [Integrations](https://ai-utils.dev/integration/model-provider)
381
+ - [API Reference](https://ai-utils.dev/api/modules)
382
+
383
+ ## More Examples
384
+
385
+ ### [Basic Examples](https://github.com/lgrammel/ai-utils.js/tree/main/examples/basic)
386
+
387
+ Examples for the individual functions and objects.
388
+
389
+ ### [PDF to Tweet](https://github.com/lgrammel/ai-utils.js/tree/main/examples/pdf-to-tweet)
390
+
391
+ > _terminal app_, _PDF parsing_, _recursive information extraction_, _in-memory vector index_, _style example retrieval_, _OpenAI GPT-4_, _cost calculation_
392
+
393
+ Extracts information about a topic from a PDF and writes a tweet in your own style about it.
394
+
395
+ ### [AI Chat (Next.JS)](https://github.com/lgrammel/ai-utils.js/tree/main/examples/ai-chat-next-js)
396
+
397
+ > _Next.js app_, _OpenAI GPT-3.5-turbo_, _streaming_, _abort handling_
398
+
399
+ A basic web chat with an AI assistant, implemented as a Next.js app.
400
+
401
+ ### [Image generator (Next.js)](https://github.com/lgrammel/ai-utils.js/tree/main/examples/image-generator-next-js)
402
+
403
+ > _Next.js app_, _Stability AI image generation_
404
+
405
+ Create a 19th-century painting image for your input.
406
+
407
+ ### [Voice recording and transcription (Next.js)](https://github.com/lgrammel/ai-utils.js/tree/main/examples/voice-recording-next-js)
408
+
409
+ > _Next.js app_, _OpenAI Whisper_
410
+
411
+ Record audio with push-to-talk and transcribe it using Whisper, implemented as a Next.js app. The app shows a list of the transcriptions.
412
+
413
+ ### [BabyAGI Classic](https://github.com/lgrammel/ai-utils.js/tree/main/examples/baby-agi)
414
+
415
+ > _terminal app_, _agent_, _BabyAGI_, _OpenAI text-davinci-003_
416
+
417
+ TypeScript implementation of the classic [BabyAGI](https://github.com/yoheinakajima/babyagi/blob/main/classic/babyagi.py) by [@yoheinakajima](https://twitter.com/yoheinakajima) without embeddings.
418
+
419
+ ### [Middle school math](https://github.com/lgrammel/ai-utils.js/tree/main/examples/middle-school-math)
420
+
421
+ > _terminal app_, _agent_, _tools_, _GPT-4_
422
+
423
+ Small agent that solves middle school math problems. It uses a calculator tool to solve the problems.
424
+
425
+ ### [Terminal Chat (llama.cpp)](https://github.com/lgrammel/ai-utils.js/tree/main/examples/terminal-chat-llamacpp)
426
+
427
+ > _terminal app_, _chat_, _llama.cpp_
428
+
429
+ A terminal chat with a Llama.cpp server backend.
@@ -0,0 +1,22 @@
1
"use strict";
// CommonJS re-export barrel for the composed-function module.
// The two helpers below are TypeScript's standard emitted interop helpers:
// `__createBinding` copies one named export from a required module onto this
// module's `exports`, using a live getter binding when property descriptors
// are available, and a plain assignment otherwise.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// `__exportStar` re-exports every property of `m` except `default` and any
// name already present on `exports` (so earlier re-exports win).
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Tool usage helpers.
__exportStar(require("./use-tool/NoSuchToolError.cjs"), exports);
__exportStar(require("./use-tool/Tool.cjs"), exports);
__exportStar(require("./use-tool/useTool.cjs"), exports);
// Recursive summarization helpers.
__exportStar(require("./summarize/SummarizationFunction.cjs"), exports);
__exportStar(require("./summarize/summarizeRecursively.cjs"), exports);
__exportStar(require("./summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs"), exports);
@@ -0,0 +1,6 @@
1
// Type declarations for the composed-function barrel:
// tool usage (NoSuchToolError, Tool, useTool) and recursive summarization.
export * from "./use-tool/NoSuchToolError.js";
export * from "./use-tool/Tool.js";
export * from "./use-tool/useTool.js";
export * from "./summarize/SummarizationFunction.js";
export * from "./summarize/summarizeRecursively.js";
export * from "./summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js";
@@ -0,0 +1,6 @@
1
// ESM barrel for the composed-function module:
// tool usage (NoSuchToolError, Tool, useTool) and recursive summarization.
export * from "./use-tool/NoSuchToolError.js";
export * from "./use-tool/Tool.js";
export * from "./use-tool/useTool.js";
export * from "./summarize/SummarizationFunction.js";
export * from "./summarize/summarizeRecursively.js";
export * from "./summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js";
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,4 @@
1
import { RunFunction } from "../../run/RunFunction.js";
/**
 * A function that takes a text and produces a summary string.
 * Modeled as a `RunFunction` so callers can thread run options through.
 */
export type SummarizationFunction = RunFunction<{
    text: string;
}, string>;
@@ -0,0 +1,19 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Hoisted placeholder so circular requires see the export name early.
exports.summarizeRecursively = void 0;
/**
 * Recursively summarizes a text: splits it into chunks, summarizes every
 * chunk in parallel, and — while more than one partial summary remains —
 * joins the summaries and repeats until a single summary is produced.
 *
 * @param summarize produces a summary for one chunk
 * @param split splits a text into an array of chunks
 * @param join joins intermediate summaries (defaults to "\n\n" concatenation)
 * @param text the input text
 * @param options forwarded to `summarize` on every call
 * @returns the final single summary
 */
async function summarizeRecursively({ summarize, split, join = (texts) => texts.join("\n\n"), text, }, options) {
    const chunks = await split({ text });
    // All chunks are summarized in parallel.
    const summarizedTexts = await Promise.all(chunks.map((chunk) => summarize({ text: chunk }, options)));
    if (summarizedTexts.length === 1) {
        return summarizedTexts[0];
    }
    // recursive mapping: will split joined results as needed to stay
    // within the allowed size limit of the splitter.
    // NOTE(review): termination relies on `summarize` shrinking the text and on
    // `split` returning at least one chunk — confirm upstream splitters never
    // return an empty array, which would recurse on the empty join result.
    return summarizeRecursively({
        text: join(summarizedTexts),
        summarize,
        split,
        join,
    }, options);
}
exports.summarizeRecursively = summarizeRecursively;
@@ -0,0 +1,11 @@
1
import { Run } from "../../run/Run.js";
import { SplitFunction } from "../../text-chunk/split/SplitFunction.js";
import { SummarizationFunction } from "./SummarizationFunction.js";
/**
 * Recursively summarizes a text: the text is split into chunks, each chunk is
 * summarized, and the joined summaries are re-summarized until a single
 * summary remains.
 *
 * @param summarize summarizes one chunk of text
 * @param split splits a text into chunks
 * @param join joins intermediate summaries; defaults to "\n\n" concatenation
 * @param text the text to summarize
 * @param options optional run to forward to `summarize`
 * @returns the final single summary
 */
export declare function summarizeRecursively({ summarize, split, join, text, }: {
    summarize: SummarizationFunction;
    split: SplitFunction;
    join?: (texts: Array<string>) => string;
    text: string;
}, options?: {
    run?: Run;
}): Promise<string>;
@@ -0,0 +1,15 @@
1
+ export async function summarizeRecursively({ summarize, split, join = (texts) => texts.join("\n\n"), text, }, options) {
2
+ const chunks = await split({ text });
3
+ const summarizedTexts = await Promise.all(chunks.map((chunk) => summarize({ text: chunk }, options)));
4
+ if (summarizedTexts.length === 1) {
5
+ return summarizedTexts[0];
6
+ }
7
+ // recursive mapping: will split joined results as needed to stay
8
+ // within the allowed size limit of the splitter.
9
+ return summarizeRecursively({
10
+ text: join(summarizedTexts),
11
+ summarize,
12
+ split,
13
+ join,
14
+ }, options);
15
+ }
@@ -0,0 +1,29 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Hoisted placeholder so circular requires see the export name early.
exports.summarizeRecursivelyWithTextGenerationAndTokenSplitting = void 0;
const generateText_js_1 = require("../../model-function/generate-text/generateText.cjs");
const splitRecursively_js_1 = require("../../text-chunk/split/splitRecursively.cjs");
const summarizeRecursively_js_1 = require("./summarizeRecursively.cjs");
/**
 * Recursively summarizes a text using a text generation model, e.g. for summarization or text extraction.
 * It automatically splits the text into optimal chunks that are small enough to be processed by the model,
 * while leaving enough space for the model to generate text.
 */
async function summarizeRecursivelyWithTextGenerationAndTokenSplitting({ text, model, prompt, reservedCompletionTokens, join, }, options) {
    // Token overhead of the prompt template alone, measured with empty content.
    const emptyPromptTokens = await model.countPromptTokens(await prompt({ text: "" }));
    return (0, summarizeRecursively_js_1.summarizeRecursively)({
        // Chunk budget = context window − reserved completion tokens − template overhead.
        split: (0, splitRecursively_js_1.splitRecursivelyAtTokenAsSplitFunction)({
            tokenizer: model.tokenizer,
            maxChunkSize: model.contextWindowSize -
                reservedCompletionTokens -
                emptyPromptTokens,
        }),
        // Each chunk is rendered through `prompt` and sent to the model, with the
        // completion capped at the reserved token count.
        summarize: async (input) => {
            const { text } = await (0, generateText_js_1.generateText)(model.withMaxCompletionTokens(reservedCompletionTokens), await prompt(input), options);
            return text;
        },
        join,
        text,
    }, options);
}
exports.summarizeRecursivelyWithTextGenerationAndTokenSplitting = summarizeRecursivelyWithTextGenerationAndTokenSplitting;
@@ -0,0 +1,24 @@
1
import { TextGenerationModelSettings, TextGenerationModel } from "../../model-function/generate-text/TextGenerationModel.js";
import { FullTokenizer } from "../../model-function/tokenize-text/Tokenizer.js";
import { Run } from "../../run/Run.js";
/**
 * Recursively summarizes a text using a text generation model, e.g. for summarization or text extraction.
 * It automatically splits the text into optimal chunks that are small enough to be processed by the model,
 * while leaving enough space for the model to generate text.
 *
 * @param text the text to summarize
 * @param model a text generation model that also exposes its context window
 *   size, a full tokenizer, and prompt-token counting
 * @param prompt maps a text chunk to the model-specific prompt
 * @param reservedCompletionTokens tokens kept free for the generated summary
 * @param join optional joiner for intermediate summaries
 * @returns the final summary
 */
export declare function summarizeRecursivelyWithTextGenerationAndTokenSplitting<PROMPT>({ text, model, prompt, reservedCompletionTokens, join, }: {
    text: string;
    model: TextGenerationModel<PROMPT, any, any, TextGenerationModelSettings> & {
        contextWindowSize: number;
        tokenizer: FullTokenizer;
        countPromptTokens: (prompt: PROMPT) => PromiseLike<number>;
    };
    prompt: (input: {
        text: string;
    }) => Promise<PROMPT>;
    reservedCompletionTokens: number;
    join?: (texts: Array<string>) => string;
}, options?: {
    functionId?: string;
    run?: Run;
}): Promise<string>;
@@ -0,0 +1,25 @@
1
import { generateText } from "../../model-function/generate-text/generateText.js";
import { splitRecursivelyAtTokenAsSplitFunction } from "../../text-chunk/split/splitRecursively.js";
import { summarizeRecursively } from "./summarizeRecursively.js";
/**
 * Recursively summarizes a text using a text generation model, e.g. for summarization or text extraction.
 * It automatically splits the text into optimal chunks that are small enough to be processed by the model,
 * while leaving enough space for the model to generate text.
 */
export async function summarizeRecursivelyWithTextGenerationAndTokenSplitting({ text, model, prompt, reservedCompletionTokens, join, }, options) {
    // Token overhead of the prompt template alone, measured with empty content.
    const templateTokenCount = await model.countPromptTokens(await prompt({ text: "" }));

    // Each chunk must fit into what remains of the context window after the
    // reserved completion tokens and the template overhead are subtracted.
    const split = splitRecursivelyAtTokenAsSplitFunction({
        tokenizer: model.tokenizer,
        maxChunkSize:
            model.contextWindowSize - reservedCompletionTokens - templateTokenCount,
    });

    // Render each chunk through `prompt` and generate with the completion
    // capped at the reserved token count.
    const summarize = async (input) => {
        const result = await generateText(
            model.withMaxCompletionTokens(reservedCompletionTokens),
            await prompt(input),
            options
        );
        return result.text;
    };

    return summarizeRecursively({ split, summarize, join, text }, options);
}
@@ -0,0 +1,17 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.NoSuchToolError = void 0;
4
+ class NoSuchToolError extends Error {
5
+ constructor(toolName) {
6
+ super(`No such tool: ${toolName}`);
7
+ Object.defineProperty(this, "toolName", {
8
+ enumerable: true,
9
+ configurable: true,
10
+ writable: true,
11
+ value: void 0
12
+ });
13
+ this.name = "NoSuchToolError";
14
+ this.toolName = toolName;
15
+ }
16
+ }
17
+ exports.NoSuchToolError = NoSuchToolError;
@@ -0,0 +1,4 @@
1
/**
 * Error raised when a requested tool name does not match any known tool.
 * `toolName` carries the name that could not be resolved.
 */
export declare class NoSuchToolError extends Error {
    readonly toolName: string;
    constructor(toolName: string);
}