modelfusion 0.0.44

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (405)
  1. package/LICENSE +21 -0
  2. package/README.md +429 -0
  3. package/composed-function/index.cjs +22 -0
  4. package/composed-function/index.d.ts +6 -0
  5. package/composed-function/index.js +6 -0
  6. package/composed-function/summarize/SummarizationFunction.cjs +2 -0
  7. package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
  8. package/composed-function/summarize/SummarizationFunction.js +1 -0
  9. package/composed-function/summarize/summarizeRecursively.cjs +19 -0
  10. package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
  11. package/composed-function/summarize/summarizeRecursively.js +15 -0
  12. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
  13. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
  14. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
  15. package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
  16. package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
  17. package/composed-function/use-tool/NoSuchToolError.js +13 -0
  18. package/composed-function/use-tool/Tool.cjs +43 -0
  19. package/composed-function/use-tool/Tool.d.ts +15 -0
  20. package/composed-function/use-tool/Tool.js +39 -0
  21. package/composed-function/use-tool/useTool.cjs +59 -0
  22. package/composed-function/use-tool/useTool.d.ts +36 -0
  23. package/composed-function/use-tool/useTool.js +54 -0
  24. package/cost/Cost.cjs +38 -0
  25. package/cost/Cost.d.ts +16 -0
  26. package/cost/Cost.js +34 -0
  27. package/cost/CostCalculator.cjs +2 -0
  28. package/cost/CostCalculator.d.ts +8 -0
  29. package/cost/CostCalculator.js +1 -0
  30. package/cost/calculateCost.cjs +28 -0
  31. package/cost/calculateCost.d.ts +7 -0
  32. package/cost/calculateCost.js +24 -0
  33. package/cost/index.cjs +19 -0
  34. package/cost/index.d.ts +3 -0
  35. package/cost/index.js +3 -0
  36. package/index.cjs +25 -0
  37. package/index.d.ts +9 -0
  38. package/index.js +9 -0
  39. package/model-function/AbstractModel.cjs +22 -0
  40. package/model-function/AbstractModel.d.ts +12 -0
  41. package/model-function/AbstractModel.js +18 -0
  42. package/model-function/FunctionOptions.cjs +2 -0
  43. package/model-function/FunctionOptions.d.ts +6 -0
  44. package/model-function/FunctionOptions.js +1 -0
  45. package/model-function/Model.cjs +2 -0
  46. package/model-function/Model.d.ts +23 -0
  47. package/model-function/Model.js +1 -0
  48. package/model-function/ModelCallEvent.cjs +2 -0
  49. package/model-function/ModelCallEvent.d.ts +18 -0
  50. package/model-function/ModelCallEvent.js +1 -0
  51. package/model-function/ModelCallEventSource.cjs +42 -0
  52. package/model-function/ModelCallEventSource.d.ts +13 -0
  53. package/model-function/ModelCallEventSource.js +38 -0
  54. package/model-function/ModelCallObserver.cjs +2 -0
  55. package/model-function/ModelCallObserver.d.ts +5 -0
  56. package/model-function/ModelCallObserver.js +1 -0
  57. package/model-function/ModelInformation.cjs +2 -0
  58. package/model-function/ModelInformation.d.ts +4 -0
  59. package/model-function/ModelInformation.js +1 -0
  60. package/model-function/SuccessfulModelCall.cjs +22 -0
  61. package/model-function/SuccessfulModelCall.d.ts +9 -0
  62. package/model-function/SuccessfulModelCall.js +18 -0
  63. package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
  64. package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
  65. package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
  66. package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
  67. package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
  68. package/model-function/embed-text/TextEmbeddingModel.js +1 -0
  69. package/model-function/embed-text/embedText.cjs +90 -0
  70. package/model-function/embed-text/embedText.d.ts +33 -0
  71. package/model-function/embed-text/embedText.js +85 -0
  72. package/model-function/executeCall.cjs +60 -0
  73. package/model-function/executeCall.d.ts +27 -0
  74. package/model-function/executeCall.js +56 -0
  75. package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
  76. package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
  77. package/model-function/generate-image/ImageGenerationEvent.js +1 -0
  78. package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
  79. package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
  80. package/model-function/generate-image/ImageGenerationModel.js +1 -0
  81. package/model-function/generate-image/generateImage.cjs +63 -0
  82. package/model-function/generate-image/generateImage.d.ts +23 -0
  83. package/model-function/generate-image/generateImage.js +59 -0
  84. package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
  85. package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
  86. package/model-function/generate-json/GenerateJsonModel.js +1 -0
  87. package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
  88. package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
  89. package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
  90. package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
  91. package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
  92. package/model-function/generate-json/JsonGenerationEvent.js +1 -0
  93. package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
  94. package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
  95. package/model-function/generate-json/NoSuchSchemaError.js +13 -0
  96. package/model-function/generate-json/SchemaDefinition.cjs +2 -0
  97. package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
  98. package/model-function/generate-json/SchemaDefinition.js +1 -0
  99. package/model-function/generate-json/SchemaValidationError.cjs +36 -0
  100. package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
  101. package/model-function/generate-json/SchemaValidationError.js +32 -0
  102. package/model-function/generate-json/generateJson.cjs +61 -0
  103. package/model-function/generate-json/generateJson.d.ts +9 -0
  104. package/model-function/generate-json/generateJson.js +57 -0
  105. package/model-function/generate-json/generateJsonOrText.cjs +74 -0
  106. package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
  107. package/model-function/generate-json/generateJsonOrText.js +70 -0
  108. package/model-function/generate-text/AsyncQueue.cjs +66 -0
  109. package/model-function/generate-text/AsyncQueue.d.ts +17 -0
  110. package/model-function/generate-text/AsyncQueue.js +62 -0
  111. package/model-function/generate-text/DeltaEvent.cjs +2 -0
  112. package/model-function/generate-text/DeltaEvent.d.ts +7 -0
  113. package/model-function/generate-text/DeltaEvent.js +1 -0
  114. package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
  115. package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
  116. package/model-function/generate-text/TextDeltaEventSource.js +46 -0
  117. package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
  118. package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
  119. package/model-function/generate-text/TextGenerationEvent.js +1 -0
  120. package/model-function/generate-text/TextGenerationModel.cjs +2 -0
  121. package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
  122. package/model-function/generate-text/TextGenerationModel.js +1 -0
  123. package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
  124. package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
  125. package/model-function/generate-text/TextStreamingEvent.js +1 -0
  126. package/model-function/generate-text/extractTextDeltas.cjs +23 -0
  127. package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
  128. package/model-function/generate-text/extractTextDeltas.js +19 -0
  129. package/model-function/generate-text/generateText.cjs +67 -0
  130. package/model-function/generate-text/generateText.d.ts +20 -0
  131. package/model-function/generate-text/generateText.js +63 -0
  132. package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
  133. package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
  134. package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
  135. package/model-function/generate-text/streamText.cjs +115 -0
  136. package/model-function/generate-text/streamText.d.ts +11 -0
  137. package/model-function/generate-text/streamText.js +111 -0
  138. package/model-function/index.cjs +47 -0
  139. package/model-function/index.d.ts +31 -0
  140. package/model-function/index.js +31 -0
  141. package/model-function/tokenize-text/Tokenizer.cjs +2 -0
  142. package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
  143. package/model-function/tokenize-text/Tokenizer.js +1 -0
  144. package/model-function/tokenize-text/countTokens.cjs +10 -0
  145. package/model-function/tokenize-text/countTokens.d.ts +5 -0
  146. package/model-function/tokenize-text/countTokens.js +6 -0
  147. package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
  148. package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
  149. package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
  150. package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
  151. package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
  152. package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
  153. package/model-function/transcribe-audio/transcribe.cjs +62 -0
  154. package/model-function/transcribe-audio/transcribe.d.ts +22 -0
  155. package/model-function/transcribe-audio/transcribe.js +58 -0
  156. package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
  157. package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
  158. package/model-provider/automatic1111/Automatic1111Error.js +31 -0
  159. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
  160. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
  161. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
  162. package/model-provider/automatic1111/index.cjs +20 -0
  163. package/model-provider/automatic1111/index.d.ts +2 -0
  164. package/model-provider/automatic1111/index.js +2 -0
  165. package/model-provider/cohere/CohereError.cjs +36 -0
  166. package/model-provider/cohere/CohereError.d.ts +22 -0
  167. package/model-provider/cohere/CohereError.js +28 -0
  168. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
  169. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
  170. package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
  171. package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
  172. package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
  173. package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
  174. package/model-provider/cohere/CohereTokenizer.cjs +136 -0
  175. package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
  176. package/model-provider/cohere/CohereTokenizer.js +129 -0
  177. package/model-provider/cohere/index.cjs +22 -0
  178. package/model-provider/cohere/index.d.ts +4 -0
  179. package/model-provider/cohere/index.js +4 -0
  180. package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
  181. package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
  182. package/model-provider/huggingface/HuggingFaceError.js +44 -0
  183. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
  184. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
  185. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
  186. package/model-provider/huggingface/index.cjs +20 -0
  187. package/model-provider/huggingface/index.d.ts +2 -0
  188. package/model-provider/huggingface/index.js +2 -0
  189. package/model-provider/index.cjs +22 -0
  190. package/model-provider/index.d.ts +6 -0
  191. package/model-provider/index.js +6 -0
  192. package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
  193. package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
  194. package/model-provider/llamacpp/LlamaCppError.js +44 -0
  195. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
  196. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
  197. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
  198. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
  199. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
  200. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
  201. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
  202. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
  203. package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
  204. package/model-provider/llamacpp/index.cjs +22 -0
  205. package/model-provider/llamacpp/index.d.ts +4 -0
  206. package/model-provider/llamacpp/index.js +4 -0
  207. package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
  208. package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
  209. package/model-provider/openai/OpenAICostCalculator.js +67 -0
  210. package/model-provider/openai/OpenAIError.cjs +50 -0
  211. package/model-provider/openai/OpenAIError.d.ts +47 -0
  212. package/model-provider/openai/OpenAIError.js +42 -0
  213. package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
  214. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
  215. package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
  216. package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
  217. package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
  218. package/model-provider/openai/OpenAIModelSettings.js +1 -0
  219. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
  220. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
  221. package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
  222. package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
  223. package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
  224. package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
  225. package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
  226. package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
  227. package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
  228. package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
  229. package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
  230. package/model-provider/openai/TikTokenTokenizer.js +82 -0
  231. package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
  232. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
  233. package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
  234. package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
  235. package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
  236. package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
  237. package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
  238. package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
  239. package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
  240. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
  241. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
  242. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
  243. package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
  244. package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
  245. package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
  246. package/model-provider/openai/index.cjs +31 -0
  247. package/model-provider/openai/index.d.ts +13 -0
  248. package/model-provider/openai/index.js +12 -0
  249. package/model-provider/stability/StabilityError.cjs +36 -0
  250. package/model-provider/stability/StabilityError.d.ts +22 -0
  251. package/model-provider/stability/StabilityError.js +28 -0
  252. package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
  253. package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
  254. package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
  255. package/model-provider/stability/index.cjs +20 -0
  256. package/model-provider/stability/index.d.ts +2 -0
  257. package/model-provider/stability/index.js +2 -0
  258. package/package.json +87 -0
  259. package/prompt/InstructionPrompt.cjs +2 -0
  260. package/prompt/InstructionPrompt.d.ts +7 -0
  261. package/prompt/InstructionPrompt.js +1 -0
  262. package/prompt/Llama2PromptMapping.cjs +56 -0
  263. package/prompt/Llama2PromptMapping.d.ts +10 -0
  264. package/prompt/Llama2PromptMapping.js +51 -0
  265. package/prompt/OpenAIChatPromptMapping.cjs +62 -0
  266. package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
  267. package/prompt/OpenAIChatPromptMapping.js +57 -0
  268. package/prompt/PromptMapping.cjs +2 -0
  269. package/prompt/PromptMapping.d.ts +7 -0
  270. package/prompt/PromptMapping.js +1 -0
  271. package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
  272. package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
  273. package/prompt/PromptMappingTextGenerationModel.js +84 -0
  274. package/prompt/TextPromptMapping.cjs +50 -0
  275. package/prompt/TextPromptMapping.d.ts +14 -0
  276. package/prompt/TextPromptMapping.js +45 -0
  277. package/prompt/chat/ChatPrompt.cjs +2 -0
  278. package/prompt/chat/ChatPrompt.d.ts +33 -0
  279. package/prompt/chat/ChatPrompt.js +1 -0
  280. package/prompt/chat/trimChatPrompt.cjs +50 -0
  281. package/prompt/chat/trimChatPrompt.d.ts +19 -0
  282. package/prompt/chat/trimChatPrompt.js +46 -0
  283. package/prompt/chat/validateChatPrompt.cjs +36 -0
  284. package/prompt/chat/validateChatPrompt.d.ts +8 -0
  285. package/prompt/chat/validateChatPrompt.js +31 -0
  286. package/prompt/index.cjs +25 -0
  287. package/prompt/index.d.ts +9 -0
  288. package/prompt/index.js +9 -0
  289. package/run/ConsoleLogger.cjs +12 -0
  290. package/run/ConsoleLogger.d.ts +6 -0
  291. package/run/ConsoleLogger.js +8 -0
  292. package/run/DefaultRun.cjs +78 -0
  293. package/run/DefaultRun.d.ts +24 -0
  294. package/run/DefaultRun.js +74 -0
  295. package/run/IdMetadata.cjs +2 -0
  296. package/run/IdMetadata.d.ts +7 -0
  297. package/run/IdMetadata.js +1 -0
  298. package/run/Run.cjs +2 -0
  299. package/run/Run.d.ts +27 -0
  300. package/run/Run.js +1 -0
  301. package/run/RunFunction.cjs +2 -0
  302. package/run/RunFunction.d.ts +13 -0
  303. package/run/RunFunction.js +1 -0
  304. package/run/Vector.cjs +2 -0
  305. package/run/Vector.d.ts +5 -0
  306. package/run/Vector.js +1 -0
  307. package/run/index.cjs +22 -0
  308. package/run/index.d.ts +6 -0
  309. package/run/index.js +6 -0
  310. package/text-chunk/TextChunk.cjs +2 -0
  311. package/text-chunk/TextChunk.d.ts +3 -0
  312. package/text-chunk/TextChunk.js +1 -0
  313. package/text-chunk/index.cjs +22 -0
  314. package/text-chunk/index.d.ts +6 -0
  315. package/text-chunk/index.js +6 -0
  316. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
  317. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
  318. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
  319. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
  320. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
  321. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
  322. package/text-chunk/split/SplitFunction.cjs +2 -0
  323. package/text-chunk/split/SplitFunction.d.ts +4 -0
  324. package/text-chunk/split/SplitFunction.js +1 -0
  325. package/text-chunk/split/splitOnSeparator.cjs +12 -0
  326. package/text-chunk/split/splitOnSeparator.d.ts +8 -0
  327. package/text-chunk/split/splitOnSeparator.js +7 -0
  328. package/text-chunk/split/splitRecursively.cjs +41 -0
  329. package/text-chunk/split/splitRecursively.d.ts +22 -0
  330. package/text-chunk/split/splitRecursively.js +33 -0
  331. package/util/DurationMeasurement.cjs +42 -0
  332. package/util/DurationMeasurement.d.ts +5 -0
  333. package/util/DurationMeasurement.js +38 -0
  334. package/util/ErrorHandler.cjs +2 -0
  335. package/util/ErrorHandler.d.ts +1 -0
  336. package/util/ErrorHandler.js +1 -0
  337. package/util/SafeResult.cjs +2 -0
  338. package/util/SafeResult.d.ts +8 -0
  339. package/util/SafeResult.js +1 -0
  340. package/util/api/AbortError.cjs +9 -0
  341. package/util/api/AbortError.d.ts +3 -0
  342. package/util/api/AbortError.js +5 -0
  343. package/util/api/ApiCallError.cjs +45 -0
  344. package/util/api/ApiCallError.d.ts +15 -0
  345. package/util/api/ApiCallError.js +41 -0
  346. package/util/api/RetryError.cjs +24 -0
  347. package/util/api/RetryError.d.ts +10 -0
  348. package/util/api/RetryError.js +20 -0
  349. package/util/api/RetryFunction.cjs +2 -0
  350. package/util/api/RetryFunction.d.ts +1 -0
  351. package/util/api/RetryFunction.js +1 -0
  352. package/util/api/ThrottleFunction.cjs +2 -0
  353. package/util/api/ThrottleFunction.d.ts +1 -0
  354. package/util/api/ThrottleFunction.js +1 -0
  355. package/util/api/callWithRetryAndThrottle.cjs +7 -0
  356. package/util/api/callWithRetryAndThrottle.d.ts +7 -0
  357. package/util/api/callWithRetryAndThrottle.js +3 -0
  358. package/util/api/postToApi.cjs +103 -0
  359. package/util/api/postToApi.d.ts +29 -0
  360. package/util/api/postToApi.js +96 -0
  361. package/util/api/retryNever.cjs +8 -0
  362. package/util/api/retryNever.d.ts +4 -0
  363. package/util/api/retryNever.js +4 -0
  364. package/util/api/retryWithExponentialBackoff.cjs +48 -0
  365. package/util/api/retryWithExponentialBackoff.d.ts +10 -0
  366. package/util/api/retryWithExponentialBackoff.js +44 -0
  367. package/util/api/throttleMaxConcurrency.cjs +65 -0
  368. package/util/api/throttleMaxConcurrency.d.ts +7 -0
  369. package/util/api/throttleMaxConcurrency.js +61 -0
  370. package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
  371. package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
  372. package/util/api/throttleUnlimitedConcurrency.js +4 -0
  373. package/util/cosineSimilarity.cjs +26 -0
  374. package/util/cosineSimilarity.d.ts +11 -0
  375. package/util/cosineSimilarity.js +22 -0
  376. package/util/index.cjs +26 -0
  377. package/util/index.d.ts +10 -0
  378. package/util/index.js +10 -0
  379. package/util/never.cjs +6 -0
  380. package/util/never.d.ts +1 -0
  381. package/util/never.js +2 -0
  382. package/util/runSafe.cjs +15 -0
  383. package/util/runSafe.d.ts +2 -0
  384. package/util/runSafe.js +11 -0
  385. package/vector-index/VectorIndex.cjs +2 -0
  386. package/vector-index/VectorIndex.d.ts +18 -0
  387. package/vector-index/VectorIndex.js +1 -0
  388. package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
  389. package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
  390. package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
  391. package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
  392. package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
  393. package/vector-index/VectorIndexTextChunkStore.js +73 -0
  394. package/vector-index/index.cjs +22 -0
  395. package/vector-index/index.d.ts +6 -0
  396. package/vector-index/index.js +6 -0
  397. package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
  398. package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
  399. package/vector-index/memory/MemoryVectorIndex.js +56 -0
  400. package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
  401. package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
  402. package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
  403. package/vector-index/upsertTextChunks.cjs +15 -0
  404. package/vector-index/upsertTextChunks.d.ts +11 -0
  405. package/vector-index/upsertTextChunks.js +11 -0
package/prompt/PromptMappingTextGenerationModel.cjs
@@ -0,0 +1,88 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.PromptMappingTextGenerationModel = void 0;
+ class PromptMappingTextGenerationModel {
+     constructor({ model, promptMapping, }) {
+         Object.defineProperty(this, "model", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "promptMapping", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         this.model = model;
+         this.promptMapping = promptMapping;
+     }
+     get modelInformation() {
+         return this.model.modelInformation;
+     }
+     get settings() {
+         return this.model.settings;
+     }
+     get tokenizer() {
+         return this.model.tokenizer;
+     }
+     get contextWindowSize() {
+         return this.model.contextWindowSize;
+     }
+     get countPromptTokens() {
+         const originalCountPromptTokens = this.model.countPromptTokens?.bind(this.model);
+         if (originalCountPromptTokens === undefined) {
+             return undefined;
+         }
+         return ((prompt) => originalCountPromptTokens(this.promptMapping.map(prompt)));
+     }
+     generateTextResponse(prompt, options) {
+         const mappedPrompt = this.promptMapping.map(prompt);
+         return this.model.generateTextResponse(mappedPrompt, options);
+     }
+     extractText(response) {
+         return this.model.extractText(response);
+     }
+     get generateDeltaStreamResponse() {
+         const originalGenerateDeltaStreamResponse = this.model.generateDeltaStreamResponse?.bind(this.model);
+         if (originalGenerateDeltaStreamResponse === undefined) {
+             return undefined;
+         }
+         return ((prompt, options) => {
+             const mappedPrompt = this.promptMapping.map(prompt);
+             return originalGenerateDeltaStreamResponse(mappedPrompt, options);
+         });
+     }
+     get extractTextDelta() {
+         return this.model.extractTextDelta;
+     }
+     mapPrompt(promptMapping) {
+         return new PromptMappingTextGenerationModel({
+             model: this.withStopTokens(promptMapping.stopTokens),
+             promptMapping,
+         });
+     }
+     withSettings(additionalSettings) {
+         return new PromptMappingTextGenerationModel({
+             model: this.model.withSettings(additionalSettings),
+             promptMapping: this.promptMapping,
+         });
+     }
+     get maxCompletionTokens() {
+         return this.model.maxCompletionTokens;
+     }
+     withMaxCompletionTokens(maxCompletionTokens) {
+         return new PromptMappingTextGenerationModel({
+             model: this.model.withMaxCompletionTokens(maxCompletionTokens),
+             promptMapping: this.promptMapping,
+         });
+     }
+     withStopTokens(stopTokens) {
+         return new PromptMappingTextGenerationModel({
+             model: this.model.withStopTokens(stopTokens),
+             promptMapping: this.promptMapping,
+         });
+     }
+ }
+ exports.PromptMappingTextGenerationModel = PromptMappingTextGenerationModel;
package/prompt/PromptMappingTextGenerationModel.d.ts
@@ -0,0 +1,26 @@
+ import { FunctionOptions } from "../model-function/FunctionOptions.js";
+ import { DeltaEvent } from "../model-function/generate-text/DeltaEvent.js";
+ import { TextGenerationModel, TextGenerationModelSettings } from "../model-function/generate-text/TextGenerationModel.js";
+ import { PromptMapping } from "./PromptMapping.js";
+ export declare class PromptMappingTextGenerationModel<PROMPT, MODEL_PROMPT, RESPONSE, FULL_DELTA, SETTINGS extends TextGenerationModelSettings, MODEL extends TextGenerationModel<MODEL_PROMPT, RESPONSE, FULL_DELTA, SETTINGS>> implements TextGenerationModel<PROMPT, RESPONSE, FULL_DELTA, SETTINGS> {
+     private readonly model;
+     private readonly promptMapping;
+     constructor({ model, promptMapping, }: {
+         model: MODEL;
+         promptMapping: PromptMapping<PROMPT, MODEL_PROMPT>;
+     });
+     get modelInformation(): import("../index.js").ModelInformation;
+     get settings(): SETTINGS;
+     get tokenizer(): MODEL["tokenizer"];
+     get contextWindowSize(): MODEL["contextWindowSize"];
+     get countPromptTokens(): MODEL["countPromptTokens"] extends undefined ? undefined : (prompt: PROMPT) => PromiseLike<number>;
+     generateTextResponse(prompt: PROMPT, options?: FunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
+     extractText(response: RESPONSE): string;
+     get generateDeltaStreamResponse(): MODEL["generateDeltaStreamResponse"] extends undefined ? undefined : (prompt: PROMPT, options: FunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
+     get extractTextDelta(): MODEL["extractTextDelta"];
+     mapPrompt<INPUT_PROMPT>(promptMapping: PromptMapping<INPUT_PROMPT, PROMPT>): PromptMappingTextGenerationModel<INPUT_PROMPT, PROMPT, RESPONSE, FULL_DELTA, SETTINGS, this>;
+     withSettings(additionalSettings: Partial<SETTINGS>): this;
+     get maxCompletionTokens(): MODEL["maxCompletionTokens"];
+     withMaxCompletionTokens(maxCompletionTokens: number): this;
+     withStopTokens(stopTokens: string[]): this;
+ }
package/prompt/PromptMappingTextGenerationModel.js
@@ -0,0 +1,84 @@
+ export class PromptMappingTextGenerationModel {
+     constructor({ model, promptMapping, }) {
+         Object.defineProperty(this, "model", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "promptMapping", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         this.model = model;
+         this.promptMapping = promptMapping;
+     }
+     get modelInformation() {
+         return this.model.modelInformation;
+     }
+     get settings() {
+         return this.model.settings;
+     }
+     get tokenizer() {
+         return this.model.tokenizer;
+     }
+     get contextWindowSize() {
+         return this.model.contextWindowSize;
+     }
+     get countPromptTokens() {
+         const originalCountPromptTokens = this.model.countPromptTokens?.bind(this.model);
+         if (originalCountPromptTokens === undefined) {
+             return undefined;
+         }
+         return ((prompt) => originalCountPromptTokens(this.promptMapping.map(prompt)));
+     }
+     generateTextResponse(prompt, options) {
+         const mappedPrompt = this.promptMapping.map(prompt);
+         return this.model.generateTextResponse(mappedPrompt, options);
+     }
+     extractText(response) {
+         return this.model.extractText(response);
+     }
+     get generateDeltaStreamResponse() {
+         const originalGenerateDeltaStreamResponse = this.model.generateDeltaStreamResponse?.bind(this.model);
+         if (originalGenerateDeltaStreamResponse === undefined) {
+             return undefined;
+         }
+         return ((prompt, options) => {
+             const mappedPrompt = this.promptMapping.map(prompt);
+             return originalGenerateDeltaStreamResponse(mappedPrompt, options);
+         });
+     }
+     get extractTextDelta() {
+         return this.model.extractTextDelta;
+     }
+     mapPrompt(promptMapping) {
+         return new PromptMappingTextGenerationModel({
+             model: this.withStopTokens(promptMapping.stopTokens),
+             promptMapping,
+         });
+     }
+     withSettings(additionalSettings) {
+         return new PromptMappingTextGenerationModel({
+             model: this.model.withSettings(additionalSettings),
+             promptMapping: this.promptMapping,
+         });
+     }
+     get maxCompletionTokens() {
+         return this.model.maxCompletionTokens;
+     }
+     withMaxCompletionTokens(maxCompletionTokens) {
+         return new PromptMappingTextGenerationModel({
+             model: this.model.withMaxCompletionTokens(maxCompletionTokens),
+             promptMapping: this.promptMapping,
+         });
+     }
+     withStopTokens(stopTokens) {
+         return new PromptMappingTextGenerationModel({
+             model: this.model.withStopTokens(stopTokens),
+             promptMapping: this.promptMapping,
+         });
+     }
+ }
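
The wrapper above only translates the incoming prompt via `promptMapping.map` and delegates every call to the wrapped model. A minimal sketch of that flow, not taken from the package itself: the stub base model and the root `"modelfusion"` import path are assumptions made for illustration.

```ts
import {
  PromptMappingTextGenerationModel,
  InstructionToTextPromptMapping,
} from "modelfusion"; // assumed root re-export of the prompt module

// Hypothetical stub: just enough of the text generation model surface for the
// wrapper to delegate to. It simply echoes the mapped prompt back.
const stubModel = {
  modelInformation: { provider: "stub", modelName: "echo" },
  settings: {},
  generateTextResponse: async (prompt: string) => ({ text: prompt }),
  extractText: (response: { text: string }) => response.text,
} as any;

const model = new PromptMappingTextGenerationModel({
  model: stubModel,
  promptMapping: InstructionToTextPromptMapping(),
});

// The instruction prompt is mapped to a plain text prompt before delegation.
const response = await model.generateTextResponse({
  system: "You are a terse assistant.",
  instruction: "Say hello.",
});
console.log(model.extractText(response));
// -> "You are a terse assistant.\n\nSay hello."
```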
package/prompt/TextPromptMapping.cjs
@@ -0,0 +1,50 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ChatToTextPromptMapping = exports.InstructionToTextPromptMapping = void 0;
+ const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
+ const InstructionToTextPromptMapping = () => ({
+     stopTokens: [],
+     map: (instruction) => instruction.system != null
+         ? `${instruction.system}\n\n${instruction.instruction}`
+         : instruction.instruction,
+ });
+ exports.InstructionToTextPromptMapping = InstructionToTextPromptMapping;
+ /**
+  * A mapping from a chat prompt to a text prompt.
+  *
+  * @param user The label of the user in the chat.
+  * @param ai The name of the AI in the chat.
+  */
+ const ChatToTextPromptMapping = ({ user, ai }) => ({
+     map: (chatPrompt) => {
+         (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
+         let text = "";
+         for (let i = 0; i < chatPrompt.length; i++) {
+             const message = chatPrompt[i];
+             // system message:
+             if (i === 0 &&
+                 "system" in message &&
+                 typeof message.system === "string") {
+                 text += `${message.system}\n\n`;
+                 continue;
+             }
+             // user message
+             if ("user" in message) {
+                 text += `${user}:\n${message.user}\n\n`;
+                 continue;
+             }
+             // ai message:
+             if ("ai" in message) {
+                 text += `${ai}:\n${message.ai}\n\n`;
+                 continue;
+             }
+             // unsupported message:
+             throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+         }
+         // AI message prefix:
+         text += `${ai}:\n`;
+         return text;
+     },
+     stopTokens: [`\n${user}:`],
+ });
+ exports.ChatToTextPromptMapping = ChatToTextPromptMapping;
package/prompt/TextPromptMapping.d.ts
@@ -0,0 +1,14 @@
+ import { PromptMapping } from "./PromptMapping.js";
+ import { InstructionPrompt } from "./InstructionPrompt.js";
+ import { ChatPrompt } from "./chat/ChatPrompt.js";
+ export declare const InstructionToTextPromptMapping: () => PromptMapping<InstructionPrompt, string>;
+ /**
+  * A mapping from a chat prompt to a text prompt.
+  *
+  * @param user The label of the user in the chat.
+  * @param ai The name of the AI in the chat.
+  */
+ export declare const ChatToTextPromptMapping: ({ user, ai, }: {
+     user: string;
+     ai: string;
+ }) => PromptMapping<ChatPrompt, string>;
package/prompt/TextPromptMapping.js
@@ -0,0 +1,45 @@
+ import { validateChatPrompt } from "./chat/validateChatPrompt.js";
+ export const InstructionToTextPromptMapping = () => ({
+     stopTokens: [],
+     map: (instruction) => instruction.system != null
+         ? `${instruction.system}\n\n${instruction.instruction}`
+         : instruction.instruction,
+ });
+ /**
+  * A mapping from a chat prompt to a text prompt.
+  *
+  * @param user The label of the user in the chat.
+  * @param ai The name of the AI in the chat.
+  */
+ export const ChatToTextPromptMapping = ({ user, ai }) => ({
+     map: (chatPrompt) => {
+         validateChatPrompt(chatPrompt);
+         let text = "";
+         for (let i = 0; i < chatPrompt.length; i++) {
+             const message = chatPrompt[i];
+             // system message:
+             if (i === 0 &&
+                 "system" in message &&
+                 typeof message.system === "string") {
+                 text += `${message.system}\n\n`;
+                 continue;
+             }
+             // user message
+             if ("user" in message) {
+                 text += `${user}:\n${message.user}\n\n`;
+                 continue;
+             }
+             // ai message:
+             if ("ai" in message) {
+                 text += `${ai}:\n${message.ai}\n\n`;
+                 continue;
+             }
+             // unsupported message:
+             throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+         }
+         // AI message prefix:
+         text += `${ai}:\n`;
+         return text;
+     },
+     stopTokens: [`\n${user}:`],
+ });
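
For orientation (not part of the package), this is roughly what `ChatToTextPromptMapping` produces when its `map` function is applied directly to a chat prompt; the user/ai labels are whatever you pass in, and the `"modelfusion"` import path is an assumption.

```ts
import { ChatToTextPromptMapping } from "modelfusion"; // assumed root re-export

const mapping = ChatToTextPromptMapping({ user: "Customer", ai: "Agent" });

const text = mapping.map([
  { system: "You are a helpful support agent." },
  { user: "My order has not arrived." },
  { ai: "Sorry to hear that. What is the order number?" },
  { user: "It is 12345." },
]);

// The rendered text labels each message and ends with the AI prefix:
// You are a helpful support agent.
//
// Customer:
// My order has not arrived.
//
// Agent:
// Sorry to hear that. What is the order number?
//
// Customer:
// It is 12345.
//
// Agent:
console.log(text);

console.log(mapping.stopTokens); // ["\nCustomer:"]
```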
package/prompt/chat/ChatPrompt.cjs
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
package/prompt/chat/ChatPrompt.d.ts
@@ -0,0 +1,33 @@
+ /**
+  * A chat prompt is a sequence of messages with the following structure:
+  *
+  * - A chat prompt can optionally start with a system message.
+  * - After the optional system message, the first message of the chat must be a user message.
+  * - Then it must be alternating between an ai message and a user message.
+  * - The last message must always be a user message.
+  *
+  * The type checking is done at runtime, because there are no good ways to do it statically.
+  *
+  * @example
+  * ```ts
+  * [
+  *   { system: "You are a celebrated poet." },
+  *   { user: "Write a short story about a robot learning to love." },
+  *   { ai: "Once upon a time, there was a robot who learned to love." },
+  *   { user: "That's a great start!" },
+  * ]
+  * ```
+  *
+  * @see validateChatPrompt
+  */
+ export type ChatPrompt = [...({
+     user: string;
+ } | {
+     ai: string;
+ })[]] | [{
+     system: string;
+ }, ...({
+     user: string;
+ } | {
+     ai: string;
+ })[]];
package/prompt/chat/ChatPrompt.js
@@ -0,0 +1 @@
+ export {};
package/prompt/chat/trimChatPrompt.cjs
@@ -0,0 +1,50 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.trimChatPrompt = void 0;
+ const validateChatPrompt_js_1 = require("./validateChatPrompt.cjs");
+ /**
+  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
+  *
+  * It will remove user-ai message pairs that don't fit. The result is always a valid chat prompt.
+  *
+  * When the minimal chat prompt (system message + last user message) is already too long, it will only
+  * return this minimal chat prompt.
+  */
+ async function trimChatPrompt({ prompt, model, tokenLimit = model.contextWindowSize - model.maxCompletionTokens, }) {
+     (0, validateChatPrompt_js_1.validateChatPrompt)(prompt);
+     const startsWithSystemMessage = "system" in prompt[0];
+     const systemMessage = startsWithSystemMessage ? [prompt[0]] : [];
+     let messages = [];
+     // add the last message (final user message) to the prompt
+     messages.push(prompt[prompt.length - 1]);
+     // check if the minimal prompt is already too long
+     const promptTokenCount = await model.countPromptTokens([
+         ...systemMessage,
+         ...messages,
+     ]);
+     // the minimal chat prompt is already over the token limit and cannot be trimmed further:
+     if (promptTokenCount > tokenLimit) {
+         return [...systemMessage, prompt[prompt.length - 1]];
+     }
+     // inner messages
+     const innerMessages = prompt.slice(startsWithSystemMessage ? 1 : 0, -1);
+     // taking always a pair of user-message and ai-message from the end, moving backwards
+     for (let i = innerMessages.length - 1; i >= 0; i -= 2) {
+         const aiMessage = innerMessages[i];
+         const userMessage = innerMessages[i - 1];
+         // create a temporary array and check if it fits within the token limit
+         const tokenCount = await model.countPromptTokens([
+             ...systemMessage,
+             userMessage,
+             aiMessage,
+             ...messages,
+         ]);
+         if (tokenCount > tokenLimit) {
+             break;
+         }
+         // if it fits, add the messages to the messages array
+         messages = [userMessage, aiMessage, ...messages];
+     }
+     return [...systemMessage, ...messages];
+ }
+ exports.trimChatPrompt = trimChatPrompt;
package/prompt/chat/trimChatPrompt.d.ts
@@ -0,0 +1,19 @@
+ import { TextGenerationModel } from "model-function/generate-text/TextGenerationModel.js";
+ import { ChatPrompt } from "./ChatPrompt.js";
+ /**
+  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
+  *
+  * It will remove user-ai message pairs that don't fit. The result is always a valid chat prompt.
+  *
+  * When the minimal chat prompt (system message + last user message) is already too long, it will only
+  * return this minimal chat prompt.
+  */
+ export declare function trimChatPrompt({ prompt, model, tokenLimit, }: {
+     prompt: ChatPrompt;
+     model: TextGenerationModel<ChatPrompt, any, any, any> & {
+         contextWindowSize: number;
+         maxCompletionTokens: number;
+         countPromptTokens: (prompt: ChatPrompt) => PromiseLike<number>;
+     };
+     tokenLimit?: number;
+ }): Promise<ChatPrompt>;
package/prompt/chat/trimChatPrompt.js
@@ -0,0 +1,46 @@
+ import { validateChatPrompt } from "./validateChatPrompt.js";
+ /**
+  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
+  *
+  * It will remove user-ai message pairs that don't fit. The result is always a valid chat prompt.
+  *
+  * When the minimal chat prompt (system message + last user message) is already too long, it will only
+  * return this minimal chat prompt.
+  */
+ export async function trimChatPrompt({ prompt, model, tokenLimit = model.contextWindowSize - model.maxCompletionTokens, }) {
+     validateChatPrompt(prompt);
+     const startsWithSystemMessage = "system" in prompt[0];
+     const systemMessage = startsWithSystemMessage ? [prompt[0]] : [];
+     let messages = [];
+     // add the last message (final user message) to the prompt
+     messages.push(prompt[prompt.length - 1]);
+     // check if the minimal prompt is already too long
+     const promptTokenCount = await model.countPromptTokens([
+         ...systemMessage,
+         ...messages,
+     ]);
+     // the minimal chat prompt is already over the token limit and cannot be trimmed further:
+     if (promptTokenCount > tokenLimit) {
+         return [...systemMessage, prompt[prompt.length - 1]];
+     }
+     // inner messages
+     const innerMessages = prompt.slice(startsWithSystemMessage ? 1 : 0, -1);
+     // taking always a pair of user-message and ai-message from the end, moving backwards
+     for (let i = innerMessages.length - 1; i >= 0; i -= 2) {
+         const aiMessage = innerMessages[i];
+         const userMessage = innerMessages[i - 1];
+         // create a temporary array and check if it fits within the token limit
+         const tokenCount = await model.countPromptTokens([
+             ...systemMessage,
+             userMessage,
+             aiMessage,
+             ...messages,
+         ]);
+         if (tokenCount > tokenLimit) {
+             break;
+         }
+         // if it fits, add the messages to the messages array
+         messages = [userMessage, aiMessage, ...messages];
+     }
+     return [...systemMessage, ...messages];
+ }
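
A sketch of the trimming behavior, not from the package: the stand-in model below fakes `countPromptTokens` by counting JSON characters (a real model would use its tokenizer), and the `"modelfusion"` import path is an assumption.

```ts
import { trimChatPrompt } from "modelfusion"; // assumed root re-export

// Hypothetical stand-in model: "tokens" are just JSON characters here.
const charCountingModel = {
  contextWindowSize: 200,
  maxCompletionTokens: 100, // leaves a tokenLimit of 200 - 100 = 100
  countPromptTokens: async (prompt: object[]) => JSON.stringify(prompt).length,
} as any;

const trimmed = await trimChatPrompt({
  model: charCountingModel,
  prompt: [
    { system: "You are a helpful assistant." },
    { user: "First question, long since answered." },
    { ai: "First answer." },
    { user: "Second question?" },
  ],
});

// The system message and the final user message fit under the limit, but the
// oldest user/ai pair does not, so it is dropped:
console.log(trimmed);
// [ { system: "You are a helpful assistant." }, { user: "Second question?" } ]
```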
package/prompt/chat/validateChatPrompt.cjs
@@ -0,0 +1,36 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.validateChatPrompt = exports.ChatPromptValidationError = void 0;
+ class ChatPromptValidationError extends Error {
+     constructor(message) {
+         super(message);
+         this.name = "ChatPromptValidationError";
+     }
+ }
+ exports.ChatPromptValidationError = ChatPromptValidationError;
+ /**
+  * Checks if a chat prompt is valid. Throws a `ChatPromptValidationError` if it's not.
+  */
+ function validateChatPrompt(chatPrompt) {
+     if (chatPrompt.length < 1) {
+         throw new ChatPromptValidationError("ChatPrompt should have at least one message.");
+     }
+     const initialType = "system" in chatPrompt[0] ? "system" : "user";
+     if (initialType === "system" && chatPrompt.length === 1) {
+         throw new ChatPromptValidationError("A system message should be followed by a user message.");
+     }
+     let expectedType = initialType === "system" ? "user" : "ai";
+     for (let i = 1; i < chatPrompt.length; i++) {
+         const messageType = "user" in chatPrompt[i] ? "user" : "ai";
+         if (messageType !== expectedType) {
+             throw new ChatPromptValidationError(`Message at index ${i} should be a ${expectedType} message, but it's a ${messageType} message.`);
+         }
+         // Flip the expected type for the next iteration.
+         expectedType = expectedType === "user" ? "ai" : "user";
+     }
+     // If the last message is not a user message, throw an error.
+     if (expectedType !== "ai") {
+         throw new ChatPromptValidationError("The last message should be a user message.");
+     }
+ }
+ exports.validateChatPrompt = validateChatPrompt;
package/prompt/chat/validateChatPrompt.d.ts
@@ -0,0 +1,8 @@
+ import { ChatPrompt } from "./ChatPrompt.js";
+ export declare class ChatPromptValidationError extends Error {
+     constructor(message: string);
+ }
+ /**
+  * Checks if a chat prompt is valid. Throws a `ChatPromptValidationError` if it's not.
+  */
+ export declare function validateChatPrompt(chatPrompt: ChatPrompt): void;
package/prompt/chat/validateChatPrompt.js
@@ -0,0 +1,31 @@
+ export class ChatPromptValidationError extends Error {
+     constructor(message) {
+         super(message);
+         this.name = "ChatPromptValidationError";
+     }
+ }
+ /**
+  * Checks if a chat prompt is valid. Throws a `ChatPromptValidationError` if it's not.
+  */
+ export function validateChatPrompt(chatPrompt) {
+     if (chatPrompt.length < 1) {
+         throw new ChatPromptValidationError("ChatPrompt should have at least one message.");
+     }
+     const initialType = "system" in chatPrompt[0] ? "system" : "user";
+     if (initialType === "system" && chatPrompt.length === 1) {
+         throw new ChatPromptValidationError("A system message should be followed by a user message.");
+     }
+     let expectedType = initialType === "system" ? "user" : "ai";
+     for (let i = 1; i < chatPrompt.length; i++) {
+         const messageType = "user" in chatPrompt[i] ? "user" : "ai";
+         if (messageType !== expectedType) {
+             throw new ChatPromptValidationError(`Message at index ${i} should be a ${expectedType} message, but it's a ${messageType} message.`);
+         }
+         // Flip the expected type for the next iteration.
+         expectedType = expectedType === "user" ? "ai" : "user";
+     }
+     // If the last message is not a user message, throw an error.
+     if (expectedType !== "ai") {
+         throw new ChatPromptValidationError("The last message should be a user message.");
+     }
+ }
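
For illustration (not from the package), the validator rejects prompts that break the alternation rules documented in `ChatPrompt.d.ts`; the `"modelfusion"` import path is an assumption.

```ts
import { validateChatPrompt, ChatPromptValidationError } from "modelfusion"; // assumed root re-export

try {
  // Invalid: two user messages in a row.
  validateChatPrompt([{ user: "Hello" }, { user: "Are you there?" }]);
} catch (error) {
  if (error instanceof ChatPromptValidationError) {
    // "Message at index 1 should be a ai message, but it's a user message."
    console.log(error.message);
  }
}

// Valid: optional system message, then alternating user/ai, ending with user.
validateChatPrompt([
  { system: "You are a poet." },
  { user: "Write a haiku about autumn." },
]); // does not throw
```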
package/prompt/index.cjs
@@ -0,0 +1,25 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+         desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
+     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ __exportStar(require("./InstructionPrompt.cjs"), exports);
+ __exportStar(require("./Llama2PromptMapping.cjs"), exports);
+ __exportStar(require("./OpenAIChatPromptMapping.cjs"), exports);
+ __exportStar(require("./PromptMapping.cjs"), exports);
+ __exportStar(require("./PromptMappingTextGenerationModel.cjs"), exports);
+ __exportStar(require("./TextPromptMapping.cjs"), exports);
+ __exportStar(require("./chat/ChatPrompt.cjs"), exports);
+ __exportStar(require("./chat/trimChatPrompt.cjs"), exports);
+ __exportStar(require("./chat/validateChatPrompt.cjs"), exports);
package/prompt/index.d.ts
@@ -0,0 +1,9 @@
+ export * from "./InstructionPrompt.js";
+ export * from "./Llama2PromptMapping.js";
+ export * from "./OpenAIChatPromptMapping.js";
+ export * from "./PromptMapping.js";
+ export * from "./PromptMappingTextGenerationModel.js";
+ export * from "./TextPromptMapping.js";
+ export * from "./chat/ChatPrompt.js";
+ export * from "./chat/trimChatPrompt.js";
+ export * from "./chat/validateChatPrompt.js";
package/prompt/index.js
@@ -0,0 +1,9 @@
+ export * from "./InstructionPrompt.js";
+ export * from "./Llama2PromptMapping.js";
+ export * from "./OpenAIChatPromptMapping.js";
+ export * from "./PromptMapping.js";
+ export * from "./PromptMappingTextGenerationModel.js";
+ export * from "./TextPromptMapping.js";
+ export * from "./chat/ChatPrompt.js";
+ export * from "./chat/trimChatPrompt.js";
+ export * from "./chat/validateChatPrompt.js";
package/run/ConsoleLogger.cjs
@@ -0,0 +1,12 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ConsoleLogger = void 0;
+ class ConsoleLogger {
+     onModelCallStarted(event) {
+         console.log(JSON.stringify(event, null, 2));
+     }
+     onModelCallFinished(event) {
+         console.log(JSON.stringify(event, null, 2));
+     }
+ }
+ exports.ConsoleLogger = ConsoleLogger;
package/run/ConsoleLogger.d.ts
@@ -0,0 +1,6 @@
+ import { ModelCallFinishedEvent, ModelCallStartedEvent } from "../model-function/ModelCallEvent.js";
+ import { ModelCallObserver } from "../model-function/ModelCallObserver.js";
+ export declare class ConsoleLogger implements ModelCallObserver {
+     onModelCallStarted(event: ModelCallStartedEvent): void;
+     onModelCallFinished(event: ModelCallFinishedEvent): void;
+ }
package/run/ConsoleLogger.js
@@ -0,0 +1,8 @@
+ export class ConsoleLogger {
+     onModelCallStarted(event) {
+         console.log(JSON.stringify(event, null, 2));
+     }
+     onModelCallFinished(event) {
+         console.log(JSON.stringify(event, null, 2));
+     }
+ }
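
`ConsoleLogger` is one implementation of the two `ModelCallObserver` hooks shown in the `.d.ts` above. A sketch of a custom observer with the same shape, not part of the package; the file-writing behavior and the assumption that these types are re-exported from the `"modelfusion"` root are both illustrative.

```ts
import { appendFileSync } from "node:fs";
import type {
  ModelCallFinishedEvent,
  ModelCallObserver,
  ModelCallStartedEvent,
} from "modelfusion"; // assumed root re-export of the model-function types

// Hypothetical observer: appends one JSON line per model call event to a file
// instead of pretty-printing to the console.
class JsonlFileLogger implements ModelCallObserver {
  constructor(private readonly path: string) {}

  onModelCallStarted(event: ModelCallStartedEvent): void {
    appendFileSync(this.path, JSON.stringify(event) + "\n");
  }

  onModelCallFinished(event: ModelCallFinishedEvent): void {
    appendFileSync(this.path, JSON.stringify(event) + "\n");
  }
}

// Pass an instance wherever a ModelCallObserver is accepted.
const logger = new JsonlFileLogger("model-calls.jsonl");
```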