modelfusion 0.109.0 → 0.111.0

This diff shows the content of publicly available package versions as published to their public registries. It is provided for informational purposes only and reflects the changes between those published versions.
Files changed (230)
  1. package/CHANGELOG.md +25 -0
  2. package/core/api/ApiFacade.cjs +20 -0
  3. package/core/api/ApiFacade.d.ts +4 -0
  4. package/core/api/ApiFacade.js +4 -0
  5. package/core/api/BaseUrlApiConfiguration.cjs +47 -5
  6. package/core/api/BaseUrlApiConfiguration.d.ts +23 -6
  7. package/core/api/BaseUrlApiConfiguration.js +45 -4
  8. package/core/api/BaseUrlApiConfiguration.test.cjs +11 -0
  9. package/core/api/BaseUrlApiConfiguration.test.d.ts +1 -0
  10. package/core/api/BaseUrlApiConfiguration.test.js +9 -0
  11. package/core/api/callWithRetryAndThrottle.cjs +3 -3
  12. package/core/api/callWithRetryAndThrottle.js +3 -3
  13. package/core/api/index.cjs +15 -2
  14. package/core/api/index.d.ts +2 -2
  15. package/core/api/index.js +2 -2
  16. package/core/api/postToApi.cjs +28 -5
  17. package/core/api/postToApi.d.ts +5 -4
  18. package/core/api/postToApi.js +26 -4
  19. package/core/api/throttleOff.cjs +8 -0
  20. package/core/api/throttleOff.d.ts +5 -0
  21. package/core/api/throttleOff.js +4 -0
  22. package/{extension → internal}/index.cjs +2 -7
  23. package/{extension → internal}/index.d.ts +1 -1
  24. package/{extension → internal}/index.js +1 -1
  25. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  26. package/model-provider/anthropic/AnthropicApiConfiguration.cjs +14 -6
  27. package/model-provider/anthropic/AnthropicApiConfiguration.d.ts +7 -8
  28. package/model-provider/anthropic/AnthropicApiConfiguration.js +15 -7
  29. package/model-provider/anthropic/AnthropicError.cjs +7 -27
  30. package/model-provider/anthropic/AnthropicError.d.ts +21 -16
  31. package/model-provider/anthropic/AnthropicError.js +7 -25
  32. package/model-provider/anthropic/AnthropicFacade.cjs +10 -1
  33. package/model-provider/anthropic/AnthropicFacade.d.ts +9 -0
  34. package/model-provider/anthropic/AnthropicFacade.js +8 -0
  35. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +22 -24
  36. package/model-provider/anthropic/AnthropicTextGenerationModel.js +23 -25
  37. package/model-provider/anthropic/index.cjs +1 -4
  38. package/model-provider/anthropic/index.d.ts +1 -1
  39. package/model-provider/anthropic/index.js +0 -1
  40. package/model-provider/automatic1111/Automatic1111ApiConfiguration.cjs +12 -4
  41. package/model-provider/automatic1111/Automatic1111ApiConfiguration.d.ts +3 -3
  42. package/model-provider/automatic1111/Automatic1111ApiConfiguration.js +12 -4
  43. package/model-provider/automatic1111/Automatic1111Error.cjs +3 -3
  44. package/model-provider/automatic1111/Automatic1111Error.d.ts +13 -3
  45. package/model-provider/automatic1111/Automatic1111Error.js +4 -4
  46. package/model-provider/automatic1111/Automatic1111Facade.cjs +9 -9
  47. package/model-provider/automatic1111/Automatic1111Facade.d.ts +6 -6
  48. package/model-provider/automatic1111/Automatic1111Facade.js +7 -7
  49. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +11 -7
  50. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +11 -7
  51. package/model-provider/cohere/CohereApiConfiguration.cjs +14 -6
  52. package/model-provider/cohere/CohereApiConfiguration.d.ts +7 -8
  53. package/model-provider/cohere/CohereApiConfiguration.js +15 -7
  54. package/model-provider/cohere/CohereError.cjs +8 -43
  55. package/model-provider/cohere/CohereError.d.ts +9 -16
  56. package/model-provider/cohere/CohereError.js +8 -41
  57. package/model-provider/cohere/CohereFacade.cjs +12 -3
  58. package/model-provider/cohere/CohereFacade.d.ts +11 -2
  59. package/model-provider/cohere/CohereFacade.js +10 -2
  60. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +18 -22
  61. package/model-provider/cohere/CohereTextEmbeddingModel.js +18 -22
  62. package/model-provider/cohere/CohereTextGenerationModel.cjs +31 -39
  63. package/model-provider/cohere/CohereTextGenerationModel.d.ts +110 -8
  64. package/model-provider/cohere/CohereTextGenerationModel.js +31 -39
  65. package/model-provider/cohere/CohereTokenizer.cjs +32 -41
  66. package/model-provider/cohere/CohereTokenizer.d.ts +2 -2
  67. package/model-provider/cohere/CohereTokenizer.js +32 -41
  68. package/model-provider/cohere/index.cjs +1 -3
  69. package/model-provider/cohere/index.d.ts +1 -1
  70. package/model-provider/cohere/index.js +0 -1
  71. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +14 -6
  72. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.d.ts +7 -8
  73. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +15 -7
  74. package/model-provider/elevenlabs/ElevenLabsFacade.cjs +10 -1
  75. package/model-provider/elevenlabs/ElevenLabsFacade.d.ts +9 -0
  76. package/model-provider/elevenlabs/ElevenLabsFacade.js +8 -0
  77. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +42 -53
  78. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +44 -55
  79. package/model-provider/huggingface/HuggingFaceApiConfiguration.cjs +14 -6
  80. package/model-provider/huggingface/HuggingFaceApiConfiguration.d.ts +7 -8
  81. package/model-provider/huggingface/HuggingFaceApiConfiguration.js +15 -7
  82. package/model-provider/huggingface/HuggingFaceError.cjs +7 -29
  83. package/model-provider/huggingface/HuggingFaceError.d.ts +9 -16
  84. package/model-provider/huggingface/HuggingFaceError.js +7 -27
  85. package/model-provider/huggingface/HuggingFaceFacade.cjs +10 -1
  86. package/model-provider/huggingface/HuggingFaceFacade.d.ts +9 -0
  87. package/model-provider/huggingface/HuggingFaceFacade.js +8 -0
  88. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +17 -27
  89. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +17 -27
  90. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +22 -23
  91. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +22 -23
  92. package/model-provider/huggingface/index.cjs +1 -3
  93. package/model-provider/huggingface/index.d.ts +1 -1
  94. package/model-provider/huggingface/index.js +0 -1
  95. package/model-provider/llamacpp/LlamaCppApiConfiguration.cjs +13 -6
  96. package/model-provider/llamacpp/LlamaCppApiConfiguration.d.ts +7 -9
  97. package/model-provider/llamacpp/LlamaCppApiConfiguration.js +14 -7
  98. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +4 -4
  99. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +173 -5
  100. package/model-provider/llamacpp/LlamaCppCompletionModel.js +4 -4
  101. package/model-provider/llamacpp/LlamaCppError.cjs +7 -27
  102. package/model-provider/llamacpp/LlamaCppError.d.ts +9 -16
  103. package/model-provider/llamacpp/LlamaCppError.js +7 -25
  104. package/model-provider/llamacpp/LlamaCppFacade.cjs +10 -2
  105. package/model-provider/llamacpp/LlamaCppFacade.d.ts +8 -1
  106. package/model-provider/llamacpp/LlamaCppFacade.js +8 -1
  107. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +10 -14
  108. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +10 -14
  109. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +14 -18
  110. package/model-provider/llamacpp/LlamaCppTokenizer.js +14 -18
  111. package/model-provider/llamacpp/index.cjs +1 -3
  112. package/model-provider/llamacpp/index.d.ts +1 -1
  113. package/model-provider/llamacpp/index.js +0 -1
  114. package/model-provider/lmnt/LmntApiConfiguration.cjs +14 -6
  115. package/model-provider/lmnt/LmntApiConfiguration.d.ts +7 -8
  116. package/model-provider/lmnt/LmntApiConfiguration.js +15 -7
  117. package/model-provider/lmnt/LmntFacade.cjs +11 -2
  118. package/model-provider/lmnt/LmntFacade.d.ts +10 -1
  119. package/model-provider/lmnt/LmntFacade.js +9 -1
  120. package/model-provider/lmnt/LmntSpeechModel.cjs +53 -41
  121. package/model-provider/lmnt/LmntSpeechModel.d.ts +51 -3
  122. package/model-provider/lmnt/LmntSpeechModel.js +54 -42
  123. package/model-provider/mistral/MistralApiConfiguration.cjs +14 -6
  124. package/model-provider/mistral/MistralApiConfiguration.d.ts +9 -11
  125. package/model-provider/mistral/MistralApiConfiguration.js +15 -7
  126. package/model-provider/mistral/MistralChatModel.cjs +4 -4
  127. package/model-provider/mistral/MistralChatModel.d.ts +48 -3
  128. package/model-provider/mistral/MistralChatModel.js +5 -5
  129. package/model-provider/mistral/MistralError.cjs +3 -3
  130. package/model-provider/mistral/MistralError.d.ts +15 -3
  131. package/model-provider/mistral/MistralError.js +4 -4
  132. package/model-provider/mistral/MistralFacade.cjs +5 -1
  133. package/model-provider/mistral/MistralFacade.d.ts +10 -3
  134. package/model-provider/mistral/MistralFacade.js +6 -2
  135. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +2 -1
  136. package/model-provider/mistral/MistralTextEmbeddingModel.js +2 -1
  137. package/model-provider/ollama/OllamaApiConfiguration.cjs +13 -6
  138. package/model-provider/ollama/OllamaApiConfiguration.d.ts +7 -10
  139. package/model-provider/ollama/OllamaApiConfiguration.js +14 -7
  140. package/model-provider/ollama/OllamaChatModel.cjs +4 -4
  141. package/model-provider/ollama/OllamaChatModel.d.ts +46 -5
  142. package/model-provider/ollama/OllamaChatModel.js +5 -5
  143. package/model-provider/ollama/OllamaCompletionModel.cjs +4 -4
  144. package/model-provider/ollama/OllamaCompletionModel.d.ts +40 -5
  145. package/model-provider/ollama/OllamaCompletionModel.js +5 -5
  146. package/model-provider/ollama/OllamaError.cjs +3 -3
  147. package/model-provider/ollama/OllamaError.d.ts +7 -3
  148. package/model-provider/ollama/OllamaError.js +4 -4
  149. package/model-provider/ollama/OllamaFacade.cjs +6 -2
  150. package/model-provider/ollama/OllamaFacade.d.ts +8 -3
  151. package/model-provider/ollama/OllamaFacade.js +6 -2
  152. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +15 -16
  153. package/model-provider/ollama/OllamaTextEmbeddingModel.js +15 -16
  154. package/model-provider/openai/AbstractOpenAIChatModel.cjs +4 -4
  155. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +148 -2
  156. package/model-provider/openai/AbstractOpenAIChatModel.js +4 -4
  157. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +30 -32
  158. package/model-provider/openai/AbstractOpenAICompletionModel.js +30 -32
  159. package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +9 -8
  160. package/model-provider/openai/OpenAIApiConfiguration.cjs +14 -6
  161. package/model-provider/openai/OpenAIApiConfiguration.d.ts +7 -8
  162. package/model-provider/openai/OpenAIApiConfiguration.js +15 -7
  163. package/model-provider/openai/OpenAICompletionModel.cjs +3 -91
  164. package/model-provider/openai/OpenAICompletionModel.d.ts +3 -71
  165. package/model-provider/openai/OpenAICompletionModel.js +3 -91
  166. package/model-provider/openai/OpenAIError.cjs +8 -8
  167. package/model-provider/openai/OpenAIError.d.ts +27 -3
  168. package/model-provider/openai/OpenAIError.js +9 -9
  169. package/model-provider/openai/OpenAIFacade.cjs +23 -2
  170. package/model-provider/openai/OpenAIFacade.d.ts +20 -2
  171. package/model-provider/openai/OpenAIFacade.js +20 -1
  172. package/model-provider/openai/OpenAIImageGenerationModel.cjs +20 -21
  173. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
  174. package/model-provider/openai/OpenAIImageGenerationModel.js +20 -21
  175. package/model-provider/openai/OpenAISpeechModel.cjs +17 -22
  176. package/model-provider/openai/OpenAISpeechModel.js +17 -22
  177. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +18 -23
  178. package/model-provider/openai/OpenAITextEmbeddingModel.js +18 -23
  179. package/model-provider/openai/OpenAITranscriptionModel.cjs +42 -48
  180. package/model-provider/openai/OpenAITranscriptionModel.d.ts +14 -10
  181. package/model-provider/openai/OpenAITranscriptionModel.js +42 -48
  182. package/model-provider/openai/TikTokenTokenizer.cjs +0 -18
  183. package/model-provider/openai/TikTokenTokenizer.d.ts +3 -3
  184. package/model-provider/openai/TikTokenTokenizer.js +0 -18
  185. package/model-provider/openai-compatible/FireworksAIApiConfiguration.cjs +11 -7
  186. package/model-provider/openai-compatible/FireworksAIApiConfiguration.d.ts +4 -9
  187. package/model-provider/openai-compatible/FireworksAIApiConfiguration.js +12 -8
  188. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +25 -1
  189. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +23 -0
  190. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +22 -0
  191. package/model-provider/openai-compatible/TogetherAIApiConfiguration.cjs +11 -7
  192. package/model-provider/openai-compatible/TogetherAIApiConfiguration.d.ts +4 -9
  193. package/model-provider/openai-compatible/TogetherAIApiConfiguration.js +12 -8
  194. package/model-provider/stability/StabilityApiConfiguration.cjs +13 -12
  195. package/model-provider/stability/StabilityApiConfiguration.d.ts +4 -4
  196. package/model-provider/stability/StabilityApiConfiguration.js +13 -12
  197. package/model-provider/stability/StabilityError.cjs +3 -3
  198. package/model-provider/stability/StabilityError.d.ts +7 -3
  199. package/model-provider/stability/StabilityError.js +4 -4
  200. package/model-provider/stability/StabilityFacade.cjs +9 -9
  201. package/model-provider/stability/StabilityFacade.d.ts +8 -8
  202. package/model-provider/stability/StabilityFacade.js +7 -7
  203. package/model-provider/stability/StabilityImageGenerationModel.cjs +2 -1
  204. package/model-provider/stability/StabilityImageGenerationModel.js +2 -1
  205. package/model-provider/whispercpp/WhisperCppApiConfiguration.cjs +13 -6
  206. package/model-provider/whispercpp/WhisperCppApiConfiguration.d.ts +7 -10
  207. package/model-provider/whispercpp/WhisperCppApiConfiguration.js +14 -7
  208. package/model-provider/whispercpp/WhisperCppFacade.cjs +9 -5
  209. package/model-provider/whispercpp/WhisperCppFacade.d.ts +7 -2
  210. package/model-provider/whispercpp/WhisperCppFacade.js +8 -4
  211. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +5 -2
  212. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +6 -3
  213. package/package.json +5 -5
  214. package/tool/WebSearchTool.cjs +2 -2
  215. package/tool/WebSearchTool.d.ts +1 -2
  216. package/tool/WebSearchTool.js +3 -3
  217. package/vector-index/memory/MemoryVectorIndex.cjs +2 -2
  218. package/vector-index/memory/MemoryVectorIndex.js +3 -3
  219. package/core/api/BaseUrlPartsApiConfiguration.cjs +0 -53
  220. package/core/api/BaseUrlPartsApiConfiguration.d.ts +0 -26
  221. package/core/api/BaseUrlPartsApiConfiguration.js +0 -49
  222. package/core/api/throttleUnlimitedConcurrency.cjs +0 -8
  223. package/core/api/throttleUnlimitedConcurrency.d.ts +0 -5
  224. package/core/api/throttleUnlimitedConcurrency.js +0 -4
  225. package/model-provider/elevenlabs/ElevenLabsError.cjs +0 -30
  226. package/model-provider/elevenlabs/ElevenLabsError.d.ts +0 -3
  227. package/model-provider/elevenlabs/ElevenLabsError.js +0 -26
  228. package/model-provider/lmnt/LmntError.cjs +0 -30
  229. package/model-provider/lmnt/LmntError.d.ts +0 -3
  230. package/model-provider/lmnt/LmntError.js +0 -26

package/model-provider/mistral/MistralFacade.d.ts
@@ -1,7 +1,14 @@
-import { MistralApiConfiguration, MistralApiConfigurationSettings } from "./MistralApiConfiguration.js";
-import { MistralTextEmbeddingModel, MistralTextEmbeddingModelSettings } from "./MistralTextEmbeddingModel.js";
+import { PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseUrlApiConfiguration.js";
+import { MistralApiConfiguration } from "./MistralApiConfiguration.js";
 import { MistralChatModel, MistralChatModelSettings } from "./MistralChatModel.js";
-export declare function Api(settings: MistralApiConfigurationSettings): MistralApiConfiguration;
+import { MistralTextEmbeddingModel, MistralTextEmbeddingModelSettings } from "./MistralTextEmbeddingModel.js";
+/**
+ * Creates an API configuration for the Mistral API.
+ * It calls the API at https://api.mistral.ai/v1 and uses the `MISTRAL_API_KEY` env variable by default.
+ */
+export declare function Api(settings: PartialBaseUrlPartsApiConfigurationOptions & {
+apiKey?: string;
+}): MistralApiConfiguration;
 export declare function ChatTextGenerator(settings: MistralChatModelSettings): MistralChatModel;
 export declare function TextEmbedder(settings: MistralTextEmbeddingModelSettings): MistralTextEmbeddingModel;
 export { MistralChatMessage as ChatMessage, MistralChatPrompt as ChatPrompt, } from "./MistralChatModel.js";

package/model-provider/mistral/MistralFacade.js
@@ -1,6 +1,10 @@
-import { MistralApiConfiguration, } from "./MistralApiConfiguration.js";
-import { MistralTextEmbeddingModel, } from "./MistralTextEmbeddingModel.js";
+import { MistralApiConfiguration } from "./MistralApiConfiguration.js";
 import { MistralChatModel, } from "./MistralChatModel.js";
+import { MistralTextEmbeddingModel, } from "./MistralTextEmbeddingModel.js";
+/**
+ * Creates an API configuration for the Mistral API.
+ * It calls the API at https://api.mistral.ai/v1 and uses the `MISTRAL_API_KEY` env variable by default.
+ */
 export function Api(settings) {
 return new MistralApiConfiguration(settings);
 }
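
The Mistral Api() facade now takes the shared PartialBaseUrlPartsApiConfigurationOptions plus an optional apiKey instead of the removed MistralApiConfigurationSettings type. A minimal usage sketch; the mistral namespace import from the package root and the model name are assumptions, not part of this diff:

import { mistral } from "modelfusion";

// Defaults to https://api.mistral.ai/v1 and the MISTRAL_API_KEY env variable.
const api = mistral.Api({
  apiKey: process.env.MY_MISTRAL_KEY, // hypothetical env variable name
});

const chatModel = mistral.ChatTextGenerator({
  api,
  model: "mistral-tiny", // example model name
});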

package/model-provider/mistral/MistralTextEmbeddingModel.cjs
@@ -4,6 +4,7 @@ exports.MistralTextEmbeddingModel = void 0;
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const MistralApiConfiguration_js_1 = require("./MistralApiConfiguration.cjs");
 const MistralError_js_1 = require("./MistralError.cjs");
@@ -62,7 +63,7 @@ class MistralTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
 encoding_format: encodingFormat,
 },
 failedResponseHandler: MistralError_js_1.failedMistralCallResponseHandler,
-successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(MistralTextEmbeddingResponseSchema),
+successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(MistralTextEmbeddingResponseSchema)),
 abortSignal,
 }),
 });

package/model-provider/mistral/MistralTextEmbeddingModel.js
@@ -1,6 +1,7 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { MistralApiConfiguration } from "./MistralApiConfiguration.js";
 import { failedMistralCallResponseHandler } from "./MistralError.js";
@@ -59,7 +60,7 @@ export class MistralTextEmbeddingModel extends AbstractModel {
 encoding_format: encodingFormat,
 },
 failedResponseHandler: failedMistralCallResponseHandler,
-successfulResponseHandler: createJsonResponseHandler(MistralTextEmbeddingResponseSchema),
+successfulResponseHandler: createJsonResponseHandler(zodSchema(MistralTextEmbeddingResponseSchema)),
 abortSignal,
 }),
 });
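
The Mistral embedding change above is the first instance of a pattern that repeats throughout this diff: response and error handlers now receive a modelfusion Schema created with the zodSchema() helper, instead of either a raw zod schema (as in the Mistral files above) or a schema pre-wrapped with new ZodSchema(...) (as in the Ollama files below). A minimal sketch of the before/after, assuming zodSchema is re-exported from the package root (internally it lives in core/schema/ZodSchema.js):

import { z } from "zod";
import { zodSchema } from "modelfusion"; // assumption: re-export of core/schema/ZodSchema.js

const responseSchema = z.object({ embedding: z.array(z.number()) });

// 0.109: createJsonResponseHandler(responseSchema)                  // raw zod schema
//    or: createJsonResponseHandler(new ZodSchema(responseSchema))   // pre-wrapped instance
// 0.111: the plain zod schema is wrapped at the call site:
const wrapped = zodSchema(responseSchema);
// createJsonResponseHandler(wrapped)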

package/model-provider/ollama/OllamaApiConfiguration.cjs
@@ -2,13 +2,20 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OllamaApiConfiguration = void 0;
 const BaseUrlApiConfiguration_js_1 = require("../../core/api/BaseUrlApiConfiguration.cjs");
-class OllamaApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfiguration {
-constructor({ baseUrl = "http://127.0.0.1:11434", retry, throttle, } = {}) {
+/**
+ * Creates an API configuration for the Ollama API.
+ * It calls the API at http://127.0.0.1:11434 by default.
+ */
+class OllamaApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfigurationWithDefaults {
+constructor(settings = {}) {
 super({
-baseUrl,
-headers: {},
-retry,
-throttle,
+...settings,
+baseUrlDefaults: {
+protocol: "http",
+host: "127.0.0.1",
+port: "11434",
+path: "",
+},
 });
 }
 }

package/model-provider/ollama/OllamaApiConfiguration.d.ts
@@ -1,11 +1,8 @@
-import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
-import { RetryFunction } from "../../core/api/RetryFunction.js";
-import { ThrottleFunction } from "../../core/api/ThrottleFunction.js";
-export type OllamaApiConfigurationSettings = {
-baseUrl?: string;
-retry?: RetryFunction;
-throttle?: ThrottleFunction;
-};
-export declare class OllamaApiConfiguration extends BaseUrlApiConfiguration {
-constructor({ baseUrl, retry, throttle, }?: OllamaApiConfigurationSettings);
+import { BaseUrlApiConfigurationWithDefaults, PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseUrlApiConfiguration.js";
+/**
+ * Creates an API configuration for the Ollama API.
+ * It calls the API at http://127.0.0.1:11434 by default.
+ */
+export declare class OllamaApiConfiguration extends BaseUrlApiConfigurationWithDefaults {
+constructor(settings?: PartialBaseUrlPartsApiConfigurationOptions);
 }

package/model-provider/ollama/OllamaApiConfiguration.js
@@ -1,11 +1,18 @@
-import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
-export class OllamaApiConfiguration extends BaseUrlApiConfiguration {
-constructor({ baseUrl = "http://127.0.0.1:11434", retry, throttle, } = {}) {
+import { BaseUrlApiConfigurationWithDefaults, } from "../../core/api/BaseUrlApiConfiguration.js";
+/**
+ * Creates an API configuration for the Ollama API.
+ * It calls the API at http://127.0.0.1:11434 by default.
+ */
+export class OllamaApiConfiguration extends BaseUrlApiConfigurationWithDefaults {
+constructor(settings = {}) {
 super({
-baseUrl,
-headers: {},
-retry,
-throttle,
+...settings,
+baseUrlDefaults: {
+protocol: "http",
+host: "127.0.0.1",
+port: "11434",
+path: "",
+},
 });
 }
 }
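
OllamaApiConfiguration now extends BaseUrlApiConfigurationWithDefaults, so the default endpoint is expressed as URL parts (protocol, host, port, path) that callers can override selectively. A sketch of what this enables; the exact option shape accepted by PartialBaseUrlPartsApiConfigurationOptions (a partial baseUrl override) is an assumption inferred from the baseUrlDefaults above, and the model name is only an example:

import { ollama } from "modelfusion";

// Default configuration: http://127.0.0.1:11434
const localApi = ollama.Api({});

// Override only the parts that differ from the defaults (option shape assumed):
const remoteApi = ollama.Api({
  baseUrl: { host: "ollama.internal", port: "8080" },
});

const chatModel = ollama.ChatTextGenerator({
  api: remoteApi,
  model: "llama2", // example model name, not taken from this diff
});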

package/model-provider/ollama/OllamaChatModel.cjs
@@ -197,7 +197,7 @@ const ollamaChatResponseSchema = zod_1.z.object({
 eval_count: zod_1.z.number(),
 eval_duration: zod_1.z.number(),
 });
-const ollamaChatStreamChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.discriminatedUnion("done", [
+const ollamaChatStreamChunkSchema = zod_1.z.discriminatedUnion("done", [
 zod_1.z.object({
 done: zod_1.z.literal(false),
 model: zod_1.z.string(),
@@ -218,7 +218,7 @@ const ollamaChatStreamChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.discrim
 eval_count: zod_1.z.number(),
 eval_duration: zod_1.z.number(),
 }),
-]));
+]);
 exports.OllamaChatResponseFormat = {
 /**
 * Returns the response as a JSON object.
@@ -229,7 +229,7 @@ exports.OllamaChatResponseFormat = {
 const responseBody = await response.text();
 const parsedResult = (0, parseJSON_js_1.safeParseJSON)({
 text: responseBody,
-schema: new ZodSchema_js_1.ZodSchema(zod_1.z.union([
+schema: (0, ZodSchema_js_1.zodSchema)(zod_1.z.union([
 ollamaChatResponseSchema,
 zod_1.z.object({
 done: zod_1.z.literal(false),
@@ -267,6 +267,6 @@ exports.OllamaChatResponseFormat = {
 */
 deltaIterable: {
 stream: true,
-handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)(ollamaChatStreamChunkSchema),
+handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)((0, ZodSchema_js_1.zodSchema)(ollamaChatStreamChunkSchema)),
 },
 };

package/model-provider/ollama/OllamaChatModel.d.ts
@@ -2,7 +2,6 @@ import { z } from "zod";
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
-import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
@@ -145,7 +144,29 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
 prompt_eval_duration?: number | undefined;
 }>;
 export type OllamaChatResponse = z.infer<typeof ollamaChatResponseSchema>;
-declare const ollamaChatStreamChunkSchema: ZodSchema<{
+declare const ollamaChatStreamChunkSchema: z.ZodDiscriminatedUnion<"done", [z.ZodObject<{
+done: z.ZodLiteral<false>;
+model: z.ZodString;
+created_at: z.ZodString;
+message: z.ZodObject<{
+role: z.ZodString;
+content: z.ZodString;
+}, "strip", z.ZodTypeAny, {
+role: string;
+content: string;
+}, {
+role: string;
+content: string;
+}>;
+}, "strip", z.ZodTypeAny, {
+message: {
+role: string;
+content: string;
+};
+model: string;
+done: false;
+created_at: string;
+}, {
 message: {
 role: string;
 content: string;
@@ -153,7 +174,17 @@ declare const ollamaChatStreamChunkSchema: ZodSchema<{
 model: string;
 done: false;
 created_at: string;
-} | {
+}>, z.ZodObject<{
+done: z.ZodLiteral<true>;
+model: z.ZodString;
+created_at: z.ZodString;
+total_duration: z.ZodNumber;
+load_duration: z.ZodOptional<z.ZodNumber>;
+prompt_eval_count: z.ZodNumber;
+prompt_eval_duration: z.ZodOptional<z.ZodNumber>;
+eval_count: z.ZodNumber;
+eval_duration: z.ZodNumber;
+}, "strip", z.ZodTypeAny, {
 model: string;
 done: true;
 created_at: string;
@@ -163,8 +194,18 @@ declare const ollamaChatStreamChunkSchema: ZodSchema<{
 eval_duration: number;
 load_duration?: number | undefined;
 prompt_eval_duration?: number | undefined;
-}>;
-export type OllamaChatStreamChunk = (typeof ollamaChatStreamChunkSchema)["_type"];
+}, {
+model: string;
+done: true;
+created_at: string;
+total_duration: number;
+prompt_eval_count: number;
+eval_count: number;
+eval_duration: number;
+load_duration?: number | undefined;
+prompt_eval_duration?: number | undefined;
+}>]>;
+export type OllamaChatStreamChunk = z.infer<typeof ollamaChatStreamChunkSchema>;
 export type OllamaChatResponseFormatType<T> = {
 stream: boolean;
 handler: ResponseHandler<T>;

package/model-provider/ollama/OllamaChatModel.js
@@ -2,7 +2,7 @@ import { z } from "zod";
 import { ApiCallError } from "../../core/api/ApiCallError.js";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { postJsonToApi } from "../../core/api/postToApi.js";
-import { ZodSchema } from "../../core/schema/ZodSchema.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { safeParseJSON } from "../../core/schema/parseJSON.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
@@ -193,7 +193,7 @@ const ollamaChatResponseSchema = z.object({
 eval_count: z.number(),
 eval_duration: z.number(),
 });
-const ollamaChatStreamChunkSchema = new ZodSchema(z.discriminatedUnion("done", [
+const ollamaChatStreamChunkSchema = z.discriminatedUnion("done", [
 z.object({
 done: z.literal(false),
 model: z.string(),
@@ -214,7 +214,7 @@ const ollamaChatStreamChunkSchema = new ZodSchema(z.discriminatedUnion("done", [
 eval_count: z.number(),
 eval_duration: z.number(),
 }),
-]));
+]);
 export const OllamaChatResponseFormat = {
 /**
 * Returns the response as a JSON object.
@@ -225,7 +225,7 @@ export const OllamaChatResponseFormat = {
 const responseBody = await response.text();
 const parsedResult = safeParseJSON({
 text: responseBody,
-schema: new ZodSchema(z.union([
+schema: zodSchema(z.union([
 ollamaChatResponseSchema,
 z.object({
 done: z.literal(false),
@@ -263,6 +263,6 @@ export const OllamaChatResponseFormat = {
 */
 deltaIterable: {
 stream: true,
-handler: createJsonStreamResponseHandler(ollamaChatStreamChunkSchema),
+handler: createJsonStreamResponseHandler(zodSchema(ollamaChatStreamChunkSchema)),
 },
 };
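
With the chat stream chunk schema declared as a plain discriminated union, the exported OllamaChatStreamChunk type is now the direct z.infer of that union. A hedged sketch of consuming it; the package-root import of the type is an assumption (it is declared in the .d.ts above):

import type { OllamaChatStreamChunk } from "modelfusion"; // assumed re-export

function describeChunk(chunk: OllamaChatStreamChunk): string {
  // The "done" discriminator narrows the union exactly as the schema above describes.
  return chunk.done
    ? `finished after ${chunk.eval_count} output tokens`
    : chunk.message.content;
}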

package/model-provider/ollama/OllamaCompletionModel.cjs
@@ -201,7 +201,7 @@ const ollamaCompletionResponseSchema = zod_1.z.object({
 eval_duration: zod_1.z.number(),
 context: zod_1.z.array(zod_1.z.number()).optional(),
 });
-const ollamaCompletionStreamChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.discriminatedUnion("done", [
+const ollamaCompletionStreamChunkSchema = zod_1.z.discriminatedUnion("done", [
 zod_1.z.object({
 done: zod_1.z.literal(false),
 model: zod_1.z.string(),
@@ -222,7 +222,7 @@ const ollamaCompletionStreamChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.
 eval_duration: zod_1.z.number(),
 context: zod_1.z.array(zod_1.z.number()).optional(),
 }),
-]));
+]);
 exports.OllamaCompletionResponseFormat = {
 /**
 * Returns the response as a JSON object.
@@ -233,7 +233,7 @@ exports.OllamaCompletionResponseFormat = {
 const responseBody = await response.text();
 const parsedResult = (0, parseJSON_js_1.safeParseJSON)({
 text: responseBody,
-schema: new ZodSchema_js_1.ZodSchema(zod_1.z.union([
+schema: (0, ZodSchema_js_1.zodSchema)(zod_1.z.union([
 ollamaCompletionResponseSchema,
 zod_1.z.object({
 done: zod_1.z.literal(false),
@@ -272,6 +272,6 @@ exports.OllamaCompletionResponseFormat = {
 */
 deltaIterable: {
 stream: true,
-handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)(ollamaCompletionStreamChunkSchema),
+handler: (0, createJsonStreamResponseHandler_js_1.createJsonStreamResponseHandler)((0, ZodSchema_js_1.zodSchema)(ollamaCompletionStreamChunkSchema)),
 },
 };

package/model-provider/ollama/OllamaCompletionModel.d.ts
@@ -2,7 +2,6 @@ import { z } from "zod";
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
-import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
@@ -139,12 +138,48 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
 context?: number[] | undefined;
 }>;
 export type OllamaCompletionResponse = z.infer<typeof ollamaCompletionResponseSchema>;
-declare const ollamaCompletionStreamChunkSchema: ZodSchema<{
+declare const ollamaCompletionStreamChunkSchema: z.ZodDiscriminatedUnion<"done", [z.ZodObject<{
+done: z.ZodLiteral<false>;
+model: z.ZodString;
+created_at: z.ZodString;
+response: z.ZodString;
+}, "strip", z.ZodTypeAny, {
+response: string;
+model: string;
+done: false;
+created_at: string;
+}, {
 response: string;
 model: string;
 done: false;
 created_at: string;
-} | {
+}>, z.ZodObject<{
+done: z.ZodLiteral<true>;
+model: z.ZodString;
+created_at: z.ZodString;
+total_duration: z.ZodNumber;
+load_duration: z.ZodOptional<z.ZodNumber>;
+sample_count: z.ZodOptional<z.ZodNumber>;
+sample_duration: z.ZodOptional<z.ZodNumber>;
+prompt_eval_count: z.ZodNumber;
+prompt_eval_duration: z.ZodOptional<z.ZodNumber>;
+eval_count: z.ZodNumber;
+eval_duration: z.ZodNumber;
+context: z.ZodOptional<z.ZodArray<z.ZodNumber, "many">>;
+}, "strip", z.ZodTypeAny, {
+model: string;
+done: true;
+created_at: string;
+total_duration: number;
+prompt_eval_count: number;
+eval_count: number;
+eval_duration: number;
+load_duration?: number | undefined;
+sample_count?: number | undefined;
+sample_duration?: number | undefined;
+prompt_eval_duration?: number | undefined;
+context?: number[] | undefined;
+}, {
 model: string;
 done: true;
 created_at: string;
@@ -157,8 +192,8 @@ declare const ollamaCompletionStreamChunkSchema: ZodSchema<{
 sample_duration?: number | undefined;
 prompt_eval_duration?: number | undefined;
 context?: number[] | undefined;
-}>;
-export type OllamaCompletionStreamChunk = (typeof ollamaCompletionStreamChunkSchema)["_type"];
+}>]>;
+export type OllamaCompletionStreamChunk = z.infer<typeof ollamaCompletionStreamChunkSchema>;
 export type OllamaCompletionResponseFormatType<T> = {
 stream: boolean;
 handler: ResponseHandler<T>;

package/model-provider/ollama/OllamaCompletionModel.js
@@ -2,7 +2,7 @@ import { z } from "zod";
 import { ApiCallError } from "../../core/api/ApiCallError.js";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { postJsonToApi } from "../../core/api/postToApi.js";
-import { ZodSchema, zodSchema } from "../../core/schema/ZodSchema.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { safeParseJSON } from "../../core/schema/parseJSON.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
@@ -197,7 +197,7 @@ const ollamaCompletionResponseSchema = z.object({
 eval_duration: z.number(),
 context: z.array(z.number()).optional(),
 });
-const ollamaCompletionStreamChunkSchema = zodSchema(z.discriminatedUnion("done", [
+const ollamaCompletionStreamChunkSchema = z.discriminatedUnion("done", [
 z.object({
 done: z.literal(false),
 model: z.string(),
@@ -218,7 +218,7 @@ const ollamaCompletionStreamChunkSchema = zodSchema(z.discriminatedUnion("done",
 eval_duration: z.number(),
 context: z.array(z.number()).optional(),
 }),
-]));
+]);
 export const OllamaCompletionResponseFormat = {
 /**
 * Returns the response as a JSON object.
@@ -229,7 +229,7 @@ export const OllamaCompletionResponseFormat = {
 const responseBody = await response.text();
 const parsedResult = safeParseJSON({
 text: responseBody,
-schema: new ZodSchema(z.union([
+schema: zodSchema(z.union([
 ollamaCompletionResponseSchema,
 z.object({
 done: z.literal(false),
@@ -268,6 +268,6 @@ export const OllamaCompletionResponseFormat = {
 */
 deltaIterable: {
 stream: true,
-handler: createJsonStreamResponseHandler(ollamaCompletionStreamChunkSchema),
+handler: createJsonStreamResponseHandler(zodSchema(ollamaCompletionStreamChunkSchema)),
 },
 };

package/model-provider/ollama/OllamaError.cjs
@@ -4,10 +4,10 @@ exports.failedOllamaCallResponseHandler = void 0;
 const zod_1 = require("zod");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
-const ollamaErrorDataSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
+const ollamaErrorDataSchema = zod_1.z.object({
 error: zod_1.z.string(),
-}));
+});
 exports.failedOllamaCallResponseHandler = (0, postToApi_js_1.createJsonErrorResponseHandler)({
-errorSchema: ollamaErrorDataSchema,
+errorSchema: (0, ZodSchema_js_1.zodSchema)(ollamaErrorDataSchema),
 errorToMessage: (error) => error.error,
 });

package/model-provider/ollama/OllamaError.d.ts
@@ -1,9 +1,13 @@
+import { z } from "zod";
 import { ApiCallError } from "../../core/api/ApiCallError.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
-import { ZodSchema } from "../../core/schema/ZodSchema.js";
-declare const ollamaErrorDataSchema: ZodSchema<{
+declare const ollamaErrorDataSchema: z.ZodObject<{
+error: z.ZodString;
+}, "strip", z.ZodTypeAny, {
+error: string;
+}, {
 error: string;
 }>;
-export type OllamaErrorData = (typeof ollamaErrorDataSchema)["_type"];
+export type OllamaErrorData = z.infer<typeof ollamaErrorDataSchema>;
 export declare const failedOllamaCallResponseHandler: ResponseHandler<ApiCallError>;
 export {};

package/model-provider/ollama/OllamaError.js
@@ -1,10 +1,10 @@
 import { z } from "zod";
 import { createJsonErrorResponseHandler, } from "../../core/api/postToApi.js";
-import { ZodSchema } from "../../core/schema/ZodSchema.js";
-const ollamaErrorDataSchema = new ZodSchema(z.object({
+import { zodSchema } from "../../core/schema/ZodSchema.js";
+const ollamaErrorDataSchema = z.object({
 error: z.string(),
-}));
+});
 export const failedOllamaCallResponseHandler = createJsonErrorResponseHandler({
-errorSchema: ollamaErrorDataSchema,
+errorSchema: zodSchema(ollamaErrorDataSchema),
 errorToMessage: (error) => error.error,
 });
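
The error schema change is type-level only: Ollama error bodies of the form { "error": "..." } are still mapped to an ApiCallError by failedOllamaCallResponseHandler via errorToMessage. A hedged usage sketch, assuming ApiCallError is re-exported from the package root:

import { ApiCallError } from "modelfusion"; // assumed re-export of core/api/ApiCallError.js

try {
  // ... any modelfusion call against an Ollama server ...
} catch (error) {
  if (error instanceof ApiCallError) {
    // message comes from the { error: string } response body via errorToMessage above
    console.error("Ollama call failed:", error.message);
  }
}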

package/model-provider/ollama/OllamaFacade.cjs
@@ -1,10 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.TextEmbedder = exports.ChatTextGenerator = exports.CompletionTextGenerator = exports.Api = void 0;
+const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
 const OllamaChatModel_js_1 = require("./OllamaChatModel.cjs");
-const OllamaTextEmbeddingModel_js_1 = require("./OllamaTextEmbeddingModel.cjs");
 const OllamaCompletionModel_js_1 = require("./OllamaCompletionModel.cjs");
-const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
+const OllamaTextEmbeddingModel_js_1 = require("./OllamaTextEmbeddingModel.cjs");
+/**
+ * Creates an API configuration for the Ollama API.
+ * It calls the API at http://127.0.0.1:11434 by default.
+ */
 function Api(settings) {
 return new OllamaApiConfiguration_js_1.OllamaApiConfiguration(settings);
 }

package/model-provider/ollama/OllamaFacade.d.ts
@@ -1,8 +1,13 @@
+import { PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseUrlApiConfiguration.js";
+import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
 import { OllamaChatModel, OllamaChatModelSettings } from "./OllamaChatModel.js";
-import { OllamaTextEmbeddingModel, OllamaTextEmbeddingModelSettings } from "./OllamaTextEmbeddingModel.js";
 import { OllamaCompletionModel, OllamaCompletionModelSettings } from "./OllamaCompletionModel.js";
-import { OllamaApiConfiguration, OllamaApiConfigurationSettings } from "./OllamaApiConfiguration.js";
-export declare function Api(settings: OllamaApiConfigurationSettings): OllamaApiConfiguration;
+import { OllamaTextEmbeddingModel, OllamaTextEmbeddingModelSettings } from "./OllamaTextEmbeddingModel.js";
+/**
+ * Creates an API configuration for the Ollama API.
+ * It calls the API at http://127.0.0.1:11434 by default.
+ */
+export declare function Api(settings: PartialBaseUrlPartsApiConfigurationOptions): OllamaApiConfiguration;
 export declare function CompletionTextGenerator<CONTEXT_WINDOW_SIZE extends number>(settings: OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>): OllamaCompletionModel<CONTEXT_WINDOW_SIZE>;
 export declare function ChatTextGenerator(settings: OllamaChatModelSettings): OllamaChatModel;
 export declare function TextEmbedder(settings: OllamaTextEmbeddingModelSettings): OllamaTextEmbeddingModel;

package/model-provider/ollama/OllamaFacade.js
@@ -1,7 +1,11 @@
+import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
 import { OllamaChatModel } from "./OllamaChatModel.js";
-import { OllamaTextEmbeddingModel, } from "./OllamaTextEmbeddingModel.js";
 import { OllamaCompletionModel, } from "./OllamaCompletionModel.js";
-import { OllamaApiConfiguration, } from "./OllamaApiConfiguration.js";
+import { OllamaTextEmbeddingModel, } from "./OllamaTextEmbeddingModel.js";
+/**
+ * Creates an API configuration for the Ollama API.
+ * It calls the API at http://127.0.0.1:11434 by default.
+ */
 export function Api(settings) {
 return new OllamaApiConfiguration(settings);
 }

package/model-provider/ollama/OllamaTextEmbeddingModel.cjs
@@ -4,6 +4,7 @@ exports.OllamaTextEmbeddingModel = void 0;
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
 const OllamaError_js_1 = require("./OllamaError.cjs");
@@ -36,13 +37,21 @@ class OllamaTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
 if (texts.length > this.maxValuesPerCall) {
 throw new Error(`The Ollama embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
 }
+const api = this.settings.api ?? new OllamaApiConfiguration_js_1.OllamaApiConfiguration();
+const abortSignal = options?.run?.abortSignal;
 return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
-retry: this.settings.api?.retry,
-throttle: this.settings.api?.throttle,
-call: async () => callOllamaEmbeddingAPI({
-...this.settings,
-abortSignal: options?.run?.abortSignal,
-prompt: texts[0],
+retry: api.retry,
+throttle: api.throttle,
+call: async () => (0, postToApi_js_1.postJsonToApi)({
+url: api.assembleUrl(`/api/embeddings`),
+headers: api.headers,
+body: {
+model: this.settings.model,
+prompt: texts[0],
+},
+failedResponseHandler: OllamaError_js_1.failedOllamaCallResponseHandler,
+successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(ollamaTextEmbeddingResponseSchema)),
+abortSignal,
 }),
 });
 }
@@ -66,13 +75,3 @@ exports.OllamaTextEmbeddingModel = OllamaTextEmbeddingModel;
 const ollamaTextEmbeddingResponseSchema = zod_1.z.object({
 embedding: zod_1.z.array(zod_1.z.number()),
 });
-async function callOllamaEmbeddingAPI({ api = new OllamaApiConfiguration_js_1.OllamaApiConfiguration(), abortSignal, model, prompt, }) {
-return (0, postToApi_js_1.postJsonToApi)({
-url: api.assembleUrl(`/api/embeddings`),
-headers: api.headers,
-body: { model, prompt },
-failedResponseHandler: OllamaError_js_1.failedOllamaCallResponseHandler,
-successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(ollamaTextEmbeddingResponseSchema),
-abortSignal,
-});
-}
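
The standalone callOllamaEmbeddingAPI helper is removed; the model now resolves its API configuration and posts to /api/embeddings inline. The public embedding surface is unchanged, so existing calls keep working. A minimal sketch, assuming the object-style embed() call and the root-level exports of this release line:

import { embed, ollama } from "modelfusion";

const embedding = await embed({
  model: ollama.TextEmbedder({ model: "nomic-embed-text" }), // example model name, not from this diff
  value: "A sentence to embed.",
});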