modelfusion 0.109.0 → 0.111.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (230)
  1. package/CHANGELOG.md +25 -0
  2. package/core/api/ApiFacade.cjs +20 -0
  3. package/core/api/ApiFacade.d.ts +4 -0
  4. package/core/api/ApiFacade.js +4 -0
  5. package/core/api/BaseUrlApiConfiguration.cjs +47 -5
  6. package/core/api/BaseUrlApiConfiguration.d.ts +23 -6
  7. package/core/api/BaseUrlApiConfiguration.js +45 -4
  8. package/core/api/BaseUrlApiConfiguration.test.cjs +11 -0
  9. package/core/api/BaseUrlApiConfiguration.test.d.ts +1 -0
  10. package/core/api/BaseUrlApiConfiguration.test.js +9 -0
  11. package/core/api/callWithRetryAndThrottle.cjs +3 -3
  12. package/core/api/callWithRetryAndThrottle.js +3 -3
  13. package/core/api/index.cjs +15 -2
  14. package/core/api/index.d.ts +2 -2
  15. package/core/api/index.js +2 -2
  16. package/core/api/postToApi.cjs +28 -5
  17. package/core/api/postToApi.d.ts +5 -4
  18. package/core/api/postToApi.js +26 -4
  19. package/core/api/throttleOff.cjs +8 -0
  20. package/core/api/throttleOff.d.ts +5 -0
  21. package/core/api/throttleOff.js +4 -0
  22. package/{extension → internal}/index.cjs +2 -7
  23. package/{extension → internal}/index.d.ts +1 -1
  24. package/{extension → internal}/index.js +1 -1
  25. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  26. package/model-provider/anthropic/AnthropicApiConfiguration.cjs +14 -6
  27. package/model-provider/anthropic/AnthropicApiConfiguration.d.ts +7 -8
  28. package/model-provider/anthropic/AnthropicApiConfiguration.js +15 -7
  29. package/model-provider/anthropic/AnthropicError.cjs +7 -27
  30. package/model-provider/anthropic/AnthropicError.d.ts +21 -16
  31. package/model-provider/anthropic/AnthropicError.js +7 -25
  32. package/model-provider/anthropic/AnthropicFacade.cjs +10 -1
  33. package/model-provider/anthropic/AnthropicFacade.d.ts +9 -0
  34. package/model-provider/anthropic/AnthropicFacade.js +8 -0
  35. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +22 -24
  36. package/model-provider/anthropic/AnthropicTextGenerationModel.js +23 -25
  37. package/model-provider/anthropic/index.cjs +1 -4
  38. package/model-provider/anthropic/index.d.ts +1 -1
  39. package/model-provider/anthropic/index.js +0 -1
  40. package/model-provider/automatic1111/Automatic1111ApiConfiguration.cjs +12 -4
  41. package/model-provider/automatic1111/Automatic1111ApiConfiguration.d.ts +3 -3
  42. package/model-provider/automatic1111/Automatic1111ApiConfiguration.js +12 -4
  43. package/model-provider/automatic1111/Automatic1111Error.cjs +3 -3
  44. package/model-provider/automatic1111/Automatic1111Error.d.ts +13 -3
  45. package/model-provider/automatic1111/Automatic1111Error.js +4 -4
  46. package/model-provider/automatic1111/Automatic1111Facade.cjs +9 -9
  47. package/model-provider/automatic1111/Automatic1111Facade.d.ts +6 -6
  48. package/model-provider/automatic1111/Automatic1111Facade.js +7 -7
  49. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +11 -7
  50. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +11 -7
  51. package/model-provider/cohere/CohereApiConfiguration.cjs +14 -6
  52. package/model-provider/cohere/CohereApiConfiguration.d.ts +7 -8
  53. package/model-provider/cohere/CohereApiConfiguration.js +15 -7
  54. package/model-provider/cohere/CohereError.cjs +8 -43
  55. package/model-provider/cohere/CohereError.d.ts +9 -16
  56. package/model-provider/cohere/CohereError.js +8 -41
  57. package/model-provider/cohere/CohereFacade.cjs +12 -3
  58. package/model-provider/cohere/CohereFacade.d.ts +11 -2
  59. package/model-provider/cohere/CohereFacade.js +10 -2
  60. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +18 -22
  61. package/model-provider/cohere/CohereTextEmbeddingModel.js +18 -22
  62. package/model-provider/cohere/CohereTextGenerationModel.cjs +31 -39
  63. package/model-provider/cohere/CohereTextGenerationModel.d.ts +110 -8
  64. package/model-provider/cohere/CohereTextGenerationModel.js +31 -39
  65. package/model-provider/cohere/CohereTokenizer.cjs +32 -41
  66. package/model-provider/cohere/CohereTokenizer.d.ts +2 -2
  67. package/model-provider/cohere/CohereTokenizer.js +32 -41
  68. package/model-provider/cohere/index.cjs +1 -3
  69. package/model-provider/cohere/index.d.ts +1 -1
  70. package/model-provider/cohere/index.js +0 -1
  71. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +14 -6
  72. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.d.ts +7 -8
  73. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +15 -7
  74. package/model-provider/elevenlabs/ElevenLabsFacade.cjs +10 -1
  75. package/model-provider/elevenlabs/ElevenLabsFacade.d.ts +9 -0
  76. package/model-provider/elevenlabs/ElevenLabsFacade.js +8 -0
  77. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +42 -53
  78. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +44 -55
  79. package/model-provider/huggingface/HuggingFaceApiConfiguration.cjs +14 -6
  80. package/model-provider/huggingface/HuggingFaceApiConfiguration.d.ts +7 -8
  81. package/model-provider/huggingface/HuggingFaceApiConfiguration.js +15 -7
  82. package/model-provider/huggingface/HuggingFaceError.cjs +7 -29
  83. package/model-provider/huggingface/HuggingFaceError.d.ts +9 -16
  84. package/model-provider/huggingface/HuggingFaceError.js +7 -27
  85. package/model-provider/huggingface/HuggingFaceFacade.cjs +10 -1
  86. package/model-provider/huggingface/HuggingFaceFacade.d.ts +9 -0
  87. package/model-provider/huggingface/HuggingFaceFacade.js +8 -0
  88. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +17 -27
  89. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +17 -27
  90. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +22 -23
  91. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +22 -23
  92. package/model-provider/huggingface/index.cjs +1 -3
  93. package/model-provider/huggingface/index.d.ts +1 -1
  94. package/model-provider/huggingface/index.js +0 -1
  95. package/model-provider/llamacpp/LlamaCppApiConfiguration.cjs +13 -6
  96. package/model-provider/llamacpp/LlamaCppApiConfiguration.d.ts +7 -9
  97. package/model-provider/llamacpp/LlamaCppApiConfiguration.js +14 -7
  98. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +4 -4
  99. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +173 -5
  100. package/model-provider/llamacpp/LlamaCppCompletionModel.js +4 -4
  101. package/model-provider/llamacpp/LlamaCppError.cjs +7 -27
  102. package/model-provider/llamacpp/LlamaCppError.d.ts +9 -16
  103. package/model-provider/llamacpp/LlamaCppError.js +7 -25
  104. package/model-provider/llamacpp/LlamaCppFacade.cjs +10 -2
  105. package/model-provider/llamacpp/LlamaCppFacade.d.ts +8 -1
  106. package/model-provider/llamacpp/LlamaCppFacade.js +8 -1
  107. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +10 -14
  108. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +10 -14
  109. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +14 -18
  110. package/model-provider/llamacpp/LlamaCppTokenizer.js +14 -18
  111. package/model-provider/llamacpp/index.cjs +1 -3
  112. package/model-provider/llamacpp/index.d.ts +1 -1
  113. package/model-provider/llamacpp/index.js +0 -1
  114. package/model-provider/lmnt/LmntApiConfiguration.cjs +14 -6
  115. package/model-provider/lmnt/LmntApiConfiguration.d.ts +7 -8
  116. package/model-provider/lmnt/LmntApiConfiguration.js +15 -7
  117. package/model-provider/lmnt/LmntFacade.cjs +11 -2
  118. package/model-provider/lmnt/LmntFacade.d.ts +10 -1
  119. package/model-provider/lmnt/LmntFacade.js +9 -1
  120. package/model-provider/lmnt/LmntSpeechModel.cjs +53 -41
  121. package/model-provider/lmnt/LmntSpeechModel.d.ts +51 -3
  122. package/model-provider/lmnt/LmntSpeechModel.js +54 -42
  123. package/model-provider/mistral/MistralApiConfiguration.cjs +14 -6
  124. package/model-provider/mistral/MistralApiConfiguration.d.ts +9 -11
  125. package/model-provider/mistral/MistralApiConfiguration.js +15 -7
  126. package/model-provider/mistral/MistralChatModel.cjs +4 -4
  127. package/model-provider/mistral/MistralChatModel.d.ts +48 -3
  128. package/model-provider/mistral/MistralChatModel.js +5 -5
  129. package/model-provider/mistral/MistralError.cjs +3 -3
  130. package/model-provider/mistral/MistralError.d.ts +15 -3
  131. package/model-provider/mistral/MistralError.js +4 -4
  132. package/model-provider/mistral/MistralFacade.cjs +5 -1
  133. package/model-provider/mistral/MistralFacade.d.ts +10 -3
  134. package/model-provider/mistral/MistralFacade.js +6 -2
  135. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +2 -1
  136. package/model-provider/mistral/MistralTextEmbeddingModel.js +2 -1
  137. package/model-provider/ollama/OllamaApiConfiguration.cjs +13 -6
  138. package/model-provider/ollama/OllamaApiConfiguration.d.ts +7 -10
  139. package/model-provider/ollama/OllamaApiConfiguration.js +14 -7
  140. package/model-provider/ollama/OllamaChatModel.cjs +4 -4
  141. package/model-provider/ollama/OllamaChatModel.d.ts +46 -5
  142. package/model-provider/ollama/OllamaChatModel.js +5 -5
  143. package/model-provider/ollama/OllamaCompletionModel.cjs +4 -4
  144. package/model-provider/ollama/OllamaCompletionModel.d.ts +40 -5
  145. package/model-provider/ollama/OllamaCompletionModel.js +5 -5
  146. package/model-provider/ollama/OllamaError.cjs +3 -3
  147. package/model-provider/ollama/OllamaError.d.ts +7 -3
  148. package/model-provider/ollama/OllamaError.js +4 -4
  149. package/model-provider/ollama/OllamaFacade.cjs +6 -2
  150. package/model-provider/ollama/OllamaFacade.d.ts +8 -3
  151. package/model-provider/ollama/OllamaFacade.js +6 -2
  152. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +15 -16
  153. package/model-provider/ollama/OllamaTextEmbeddingModel.js +15 -16
  154. package/model-provider/openai/AbstractOpenAIChatModel.cjs +4 -4
  155. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +148 -2
  156. package/model-provider/openai/AbstractOpenAIChatModel.js +4 -4
  157. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +30 -32
  158. package/model-provider/openai/AbstractOpenAICompletionModel.js +30 -32
  159. package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +9 -8
  160. package/model-provider/openai/OpenAIApiConfiguration.cjs +14 -6
  161. package/model-provider/openai/OpenAIApiConfiguration.d.ts +7 -8
  162. package/model-provider/openai/OpenAIApiConfiguration.js +15 -7
  163. package/model-provider/openai/OpenAICompletionModel.cjs +3 -91
  164. package/model-provider/openai/OpenAICompletionModel.d.ts +3 -71
  165. package/model-provider/openai/OpenAICompletionModel.js +3 -91
  166. package/model-provider/openai/OpenAIError.cjs +8 -8
  167. package/model-provider/openai/OpenAIError.d.ts +27 -3
  168. package/model-provider/openai/OpenAIError.js +9 -9
  169. package/model-provider/openai/OpenAIFacade.cjs +23 -2
  170. package/model-provider/openai/OpenAIFacade.d.ts +20 -2
  171. package/model-provider/openai/OpenAIFacade.js +20 -1
  172. package/model-provider/openai/OpenAIImageGenerationModel.cjs +20 -21
  173. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
  174. package/model-provider/openai/OpenAIImageGenerationModel.js +20 -21
  175. package/model-provider/openai/OpenAISpeechModel.cjs +17 -22
  176. package/model-provider/openai/OpenAISpeechModel.js +17 -22
  177. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +18 -23
  178. package/model-provider/openai/OpenAITextEmbeddingModel.js +18 -23
  179. package/model-provider/openai/OpenAITranscriptionModel.cjs +42 -48
  180. package/model-provider/openai/OpenAITranscriptionModel.d.ts +14 -10
  181. package/model-provider/openai/OpenAITranscriptionModel.js +42 -48
  182. package/model-provider/openai/TikTokenTokenizer.cjs +0 -18
  183. package/model-provider/openai/TikTokenTokenizer.d.ts +3 -3
  184. package/model-provider/openai/TikTokenTokenizer.js +0 -18
  185. package/model-provider/openai-compatible/FireworksAIApiConfiguration.cjs +11 -7
  186. package/model-provider/openai-compatible/FireworksAIApiConfiguration.d.ts +4 -9
  187. package/model-provider/openai-compatible/FireworksAIApiConfiguration.js +12 -8
  188. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +25 -1
  189. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +23 -0
  190. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +22 -0
  191. package/model-provider/openai-compatible/TogetherAIApiConfiguration.cjs +11 -7
  192. package/model-provider/openai-compatible/TogetherAIApiConfiguration.d.ts +4 -9
  193. package/model-provider/openai-compatible/TogetherAIApiConfiguration.js +12 -8
  194. package/model-provider/stability/StabilityApiConfiguration.cjs +13 -12
  195. package/model-provider/stability/StabilityApiConfiguration.d.ts +4 -4
  196. package/model-provider/stability/StabilityApiConfiguration.js +13 -12
  197. package/model-provider/stability/StabilityError.cjs +3 -3
  198. package/model-provider/stability/StabilityError.d.ts +7 -3
  199. package/model-provider/stability/StabilityError.js +4 -4
  200. package/model-provider/stability/StabilityFacade.cjs +9 -9
  201. package/model-provider/stability/StabilityFacade.d.ts +8 -8
  202. package/model-provider/stability/StabilityFacade.js +7 -7
  203. package/model-provider/stability/StabilityImageGenerationModel.cjs +2 -1
  204. package/model-provider/stability/StabilityImageGenerationModel.js +2 -1
  205. package/model-provider/whispercpp/WhisperCppApiConfiguration.cjs +13 -6
  206. package/model-provider/whispercpp/WhisperCppApiConfiguration.d.ts +7 -10
  207. package/model-provider/whispercpp/WhisperCppApiConfiguration.js +14 -7
  208. package/model-provider/whispercpp/WhisperCppFacade.cjs +9 -5
  209. package/model-provider/whispercpp/WhisperCppFacade.d.ts +7 -2
  210. package/model-provider/whispercpp/WhisperCppFacade.js +8 -4
  211. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +5 -2
  212. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +6 -3
  213. package/package.json +5 -5
  214. package/tool/WebSearchTool.cjs +2 -2
  215. package/tool/WebSearchTool.d.ts +1 -2
  216. package/tool/WebSearchTool.js +3 -3
  217. package/vector-index/memory/MemoryVectorIndex.cjs +2 -2
  218. package/vector-index/memory/MemoryVectorIndex.js +3 -3
  219. package/core/api/BaseUrlPartsApiConfiguration.cjs +0 -53
  220. package/core/api/BaseUrlPartsApiConfiguration.d.ts +0 -26
  221. package/core/api/BaseUrlPartsApiConfiguration.js +0 -49
  222. package/core/api/throttleUnlimitedConcurrency.cjs +0 -8
  223. package/core/api/throttleUnlimitedConcurrency.d.ts +0 -5
  224. package/core/api/throttleUnlimitedConcurrency.js +0 -4
  225. package/model-provider/elevenlabs/ElevenLabsError.cjs +0 -30
  226. package/model-provider/elevenlabs/ElevenLabsError.d.ts +0 -3
  227. package/model-provider/elevenlabs/ElevenLabsError.js +0 -26
  228. package/model-provider/lmnt/LmntError.cjs +0 -30
  229. package/model-provider/lmnt/LmntError.d.ts +0 -3
  230. package/model-provider/lmnt/LmntError.js +0 -26

package/model-provider/ollama/OllamaTextEmbeddingModel.js

@@ -1,6 +1,7 @@
  import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+ import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
  import { failedOllamaCallResponseHandler } from "./OllamaError.js";
@@ -33,13 +34,21 @@ export class OllamaTextEmbeddingModel extends AbstractModel {
  if (texts.length > this.maxValuesPerCall) {
  throw new Error(`The Ollama embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
  }
+ const api = this.settings.api ?? new OllamaApiConfiguration();
+ const abortSignal = options?.run?.abortSignal;
  return callWithRetryAndThrottle({
- retry: this.settings.api?.retry,
- throttle: this.settings.api?.throttle,
- call: async () => callOllamaEmbeddingAPI({
- ...this.settings,
- abortSignal: options?.run?.abortSignal,
- prompt: texts[0],
+ retry: api.retry,
+ throttle: api.throttle,
+ call: async () => postJsonToApi({
+ url: api.assembleUrl(`/api/embeddings`),
+ headers: api.headers,
+ body: {
+ model: this.settings.model,
+ prompt: texts[0],
+ },
+ failedResponseHandler: failedOllamaCallResponseHandler,
+ successfulResponseHandler: createJsonResponseHandler(zodSchema(ollamaTextEmbeddingResponseSchema)),
+ abortSignal,
  }),
  });
  }
@@ -62,13 +71,3 @@ export class OllamaTextEmbeddingModel extends AbstractModel {
  const ollamaTextEmbeddingResponseSchema = z.object({
  embedding: z.array(z.number()),
  });
- async function callOllamaEmbeddingAPI({ api = new OllamaApiConfiguration(), abortSignal, model, prompt, }) {
- return postJsonToApi({
- url: api.assembleUrl(`/api/embeddings`),
- headers: api.headers,
- body: { model, prompt },
- failedResponseHandler: failedOllamaCallResponseHandler,
- successfulResponseHandler: createJsonResponseHandler(ollamaTextEmbeddingResponseSchema),
- abortSignal,
- });
- }
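
The hunks above inline the former callOllamaEmbeddingAPI helper into the model class and wrap the response schema with zodSchema() at the handler site. A minimal usage sketch follows; it is not taken from the diff, the ollama.TextEmbedder facade name and the model id are assumptions, and the higher-level embedding helpers would normally be used instead:

  import { ollama } from "modelfusion";

  // Assumed facade for OllamaTextEmbeddingModel; "llama2" is a placeholder model id.
  const embedder = ollama.TextEmbedder({ model: "llama2" });

  // As shown in the hunk above, this posts { model, prompt } to /api/embeddings
  // and validates the response against { embedding: number[] }.
  const result = await embedder.doEmbedValues(["Hello, world!"], {});
  console.log(result);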

package/model-provider/openai/AbstractOpenAIChatModel.cjs

@@ -232,7 +232,7 @@ const openAIChatResponseSchema = zod_1.z.object({
  total_tokens: zod_1.z.number(),
  }),
  });
- const openaiChatChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({
+ const openaiChatChunkSchema = zod_1.z.object({
  object: zod_1.z.literal("chat.completion.chunk"),
  id: zod_1.z.string(),
  choices: zod_1.z.array(zod_1.z.object({
@@ -271,20 +271,20 @@ const openaiChatChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({
  created: zod_1.z.number(),
  model: zod_1.z.string(),
  system_fingerprint: zod_1.z.string().optional().nullable(),
- }));
+ });
  exports.OpenAIChatResponseFormat = {
  /**
  * Returns the response as a JSON object.
  */
  json: {
  stream: false,
- handler: (0, postToApi_js_1.createJsonResponseHandler)(openAIChatResponseSchema),
+ handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAIChatResponseSchema)),
  },
  /**
  * Returns an async iterable over the text deltas (only the tex different of the first choice).
  */
  deltaIterable: {
  stream: true,
- handler: (0, createEventSourceResponseHandler_js_1.createEventSourceResponseHandler)(openaiChatChunkSchema),
+ handler: (0, createEventSourceResponseHandler_js_1.createEventSourceResponseHandler)((0, ZodSchema_js_1.zodSchema)(openaiChatChunkSchema)),
  },
  };

package/model-provider/openai/AbstractOpenAIChatModel.d.ts

@@ -461,7 +461,153 @@ declare const openAIChatResponseSchema: z.ZodObject<{
  system_fingerprint?: string | null | undefined;
  }>;
  export type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;
- declare const openaiChatChunkSchema: import("../../core/schema/ZodSchema.js").ZodSchema<{
+ declare const openaiChatChunkSchema: z.ZodObject<{
+ object: z.ZodLiteral<"chat.completion.chunk">;
+ id: z.ZodString;
+ choices: z.ZodArray<z.ZodObject<{
+ delta: z.ZodObject<{
+ role: z.ZodOptional<z.ZodEnum<["assistant", "user"]>>;
+ content: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+ function_call: z.ZodOptional<z.ZodObject<{
+ name: z.ZodOptional<z.ZodString>;
+ arguments: z.ZodOptional<z.ZodString>;
+ }, "strip", z.ZodTypeAny, {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ }, {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ }>>;
+ tool_calls: z.ZodOptional<z.ZodArray<z.ZodObject<{
+ id: z.ZodString;
+ type: z.ZodLiteral<"function">;
+ function: z.ZodObject<{
+ name: z.ZodString;
+ arguments: z.ZodString;
+ }, "strip", z.ZodTypeAny, {
+ name: string;
+ arguments: string;
+ }, {
+ name: string;
+ arguments: string;
+ }>;
+ }, "strip", z.ZodTypeAny, {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }, {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }>, "many">>;
+ }, "strip", z.ZodTypeAny, {
+ role?: "user" | "assistant" | undefined;
+ content?: string | null | undefined;
+ function_call?: {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ }, {
+ role?: "user" | "assistant" | undefined;
+ content?: string | null | undefined;
+ function_call?: {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ }>;
+ finish_reason: z.ZodOptional<z.ZodNullable<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
+ index: z.ZodNumber;
+ }, "strip", z.ZodTypeAny, {
+ delta: {
+ role?: "user" | "assistant" | undefined;
+ content?: string | null | undefined;
+ function_call?: {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+ }, {
+ delta: {
+ role?: "user" | "assistant" | undefined;
+ content?: string | null | undefined;
+ function_call?: {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+ }>, "many">;
+ created: z.ZodNumber;
+ model: z.ZodString;
+ system_fingerprint: z.ZodNullable<z.ZodOptional<z.ZodString>>;
+ }, "strip", z.ZodTypeAny, {
+ object: "chat.completion.chunk";
+ model: string;
+ id: string;
+ created: number;
+ choices: {
+ delta: {
+ role?: "user" | "assistant" | undefined;
+ content?: string | null | undefined;
+ function_call?: {
+ name?: string | undefined;
+ arguments?: string | undefined;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+ }[];
+ system_fingerprint?: string | null | undefined;
+ }, {
  object: "chat.completion.chunk";
  model: string;
  id: string;
@@ -488,7 +634,7 @@ declare const openaiChatChunkSchema: import("../../core/schema/ZodSchema.js").Zo
  }[];
  system_fingerprint?: string | null | undefined;
  }>;
- export type OpenAIChatChunk = (typeof openaiChatChunkSchema)["_type"];
+ export type OpenAIChatChunk = z.infer<typeof openaiChatChunkSchema>;
  export type OpenAIChatResponseFormatType<T> = {
  stream: boolean;
  handler: ResponseHandler<T>;

package/model-provider/openai/AbstractOpenAIChatModel.js

@@ -228,7 +228,7 @@ const openAIChatResponseSchema = z.object({
  total_tokens: z.number(),
  }),
  });
- const openaiChatChunkSchema = zodSchema(z.object({
+ const openaiChatChunkSchema = z.object({
  object: z.literal("chat.completion.chunk"),
  id: z.string(),
  choices: z.array(z.object({
@@ -267,20 +267,20 @@ const openaiChatChunkSchema = zodSchema(z.object({
  created: z.number(),
  model: z.string(),
  system_fingerprint: z.string().optional().nullable(),
- }));
+ });
  export const OpenAIChatResponseFormat = {
  /**
  * Returns the response as a JSON object.
  */
  json: {
  stream: false,
- handler: createJsonResponseHandler(openAIChatResponseSchema),
+ handler: createJsonResponseHandler(zodSchema(openAIChatResponseSchema)),
  },
  /**
  * Returns an async iterable over the text deltas (only the tex different of the first choice).
  */
  deltaIterable: {
  stream: true,
- handler: createEventSourceResponseHandler(openaiChatChunkSchema),
+ handler: createEventSourceResponseHandler(zodSchema(openaiChatChunkSchema)),
  },
  };
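
Both builds of AbstractOpenAIChatModel apply the same pattern: response schemas stay plain Zod objects and are wrapped with zodSchema() only where a response handler needs a modelfusion schema, which is also what lets the .d.ts switch OpenAIChatChunk to z.infer. A condensed sketch of that pattern, using a placeholder schema and the package-internal import paths shown in the hunks:

  import { z } from "zod";
  import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { createJsonResponseHandler } from "../../core/api/postToApi.js";

  // Keep the raw Zod schema so z.infer continues to work on it:
  const exampleResponseSchema = z.object({ id: z.string() }); // placeholder schema
  export type ExampleResponse = z.infer<typeof exampleResponseSchema>;

  // Wrap only at the call site that needs a modelfusion schema:
  export const exampleHandler = createJsonResponseHandler(zodSchema(exampleResponseSchema));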

package/model-provider/openai/AbstractOpenAICompletionModel.cjs

@@ -34,34 +34,32 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
- call: async () => {
- return (0, postToApi_js_1.postJsonToApi)({
- url: api.assembleUrl("/completions"),
- headers: api.headers,
- body: {
- stream: openaiResponseFormat.stream,
- model: this.settings.model,
- prompt,
- suffix: this.settings.suffix,
- max_tokens: this.settings.maxGenerationTokens,
- temperature: this.settings.temperature,
- top_p: this.settings.topP,
- n: this.settings.numberOfGenerations,
- logprobs: this.settings.logprobs,
- echo: this.settings.echo,
- stop: stopSequences,
- seed: this.settings.seed,
- presence_penalty: this.settings.presencePenalty,
- frequency_penalty: this.settings.frequencyPenalty,
- best_of: this.settings.bestOf,
- logit_bias: this.settings.logitBias,
- user,
- },
- failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
- successfulResponseHandler: openaiResponseFormat.handler,
- abortSignal,
- });
- },
+ call: async () => (0, postToApi_js_1.postJsonToApi)({
+ url: api.assembleUrl("/completions"),
+ headers: api.headers,
+ body: {
+ stream: openaiResponseFormat.stream,
+ model: this.settings.model,
+ prompt,
+ suffix: this.settings.suffix,
+ max_tokens: this.settings.maxGenerationTokens,
+ temperature: this.settings.temperature,
+ top_p: this.settings.topP,
+ n: this.settings.numberOfGenerations,
+ logprobs: this.settings.logprobs,
+ echo: this.settings.echo,
+ stop: stopSequences,
+ seed: this.settings.seed,
+ presence_penalty: this.settings.presencePenalty,
+ frequency_penalty: this.settings.frequencyPenalty,
+ best_of: this.settings.bestOf,
+ logit_bias: this.settings.logitBias,
+ user,
+ },
+ failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+ successfulResponseHandler: openaiResponseFormat.handler,
+ abortSignal,
+ }),
  });
  }
  async doGenerateTexts(prompt, options) {
@@ -133,7 +131,7 @@ const OpenAICompletionResponseSchema = zod_1.z.object({
  total_tokens: zod_1.z.number(),
  }),
  });
- const openaiCompletionStreamChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({
+ const openaiCompletionStreamChunkSchema = zod_1.z.object({
  choices: zod_1.z.array(zod_1.z.object({
  text: zod_1.z.string(),
  finish_reason: zod_1.z
@@ -147,14 +145,14 @@ const openaiCompletionStreamChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.
  model: zod_1.z.string(),
  system_fingerprint: zod_1.z.string().optional(),
  object: zod_1.z.literal("text_completion"),
- }));
+ });
  exports.OpenAITextResponseFormat = {
  /**
  * Returns the response as a JSON object.
  */
  json: {
  stream: false,
- handler: (0, postToApi_js_1.createJsonResponseHandler)(OpenAICompletionResponseSchema),
+ handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(OpenAICompletionResponseSchema)),
  },
  /**
  * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
@@ -162,6 +160,6 @@ exports.OpenAITextResponseFormat = {
  */
  deltaIterable: {
  stream: true,
- handler: (0, createEventSourceResponseHandler_js_1.createEventSourceResponseHandler)(openaiCompletionStreamChunkSchema),
+ handler: (0, createEventSourceResponseHandler_js_1.createEventSourceResponseHandler)((0, ZodSchema_js_1.zodSchema)(openaiCompletionStreamChunkSchema)),
  },
  };

package/model-provider/openai/AbstractOpenAICompletionModel.js

@@ -31,34 +31,32 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
- call: async () => {
- return postJsonToApi({
- url: api.assembleUrl("/completions"),
- headers: api.headers,
- body: {
- stream: openaiResponseFormat.stream,
- model: this.settings.model,
- prompt,
- suffix: this.settings.suffix,
- max_tokens: this.settings.maxGenerationTokens,
- temperature: this.settings.temperature,
- top_p: this.settings.topP,
- n: this.settings.numberOfGenerations,
- logprobs: this.settings.logprobs,
- echo: this.settings.echo,
- stop: stopSequences,
- seed: this.settings.seed,
- presence_penalty: this.settings.presencePenalty,
- frequency_penalty: this.settings.frequencyPenalty,
- best_of: this.settings.bestOf,
- logit_bias: this.settings.logitBias,
- user,
- },
- failedResponseHandler: failedOpenAICallResponseHandler,
- successfulResponseHandler: openaiResponseFormat.handler,
- abortSignal,
- });
- },
+ call: async () => postJsonToApi({
+ url: api.assembleUrl("/completions"),
+ headers: api.headers,
+ body: {
+ stream: openaiResponseFormat.stream,
+ model: this.settings.model,
+ prompt,
+ suffix: this.settings.suffix,
+ max_tokens: this.settings.maxGenerationTokens,
+ temperature: this.settings.temperature,
+ top_p: this.settings.topP,
+ n: this.settings.numberOfGenerations,
+ logprobs: this.settings.logprobs,
+ echo: this.settings.echo,
+ stop: stopSequences,
+ seed: this.settings.seed,
+ presence_penalty: this.settings.presencePenalty,
+ frequency_penalty: this.settings.frequencyPenalty,
+ best_of: this.settings.bestOf,
+ logit_bias: this.settings.logitBias,
+ user,
+ },
+ failedResponseHandler: failedOpenAICallResponseHandler,
+ successfulResponseHandler: openaiResponseFormat.handler,
+ abortSignal,
+ }),
  });
  }
  async doGenerateTexts(prompt, options) {
@@ -129,7 +127,7 @@ const OpenAICompletionResponseSchema = z.object({
  total_tokens: z.number(),
  }),
  });
- const openaiCompletionStreamChunkSchema = zodSchema(z.object({
+ const openaiCompletionStreamChunkSchema = z.object({
  choices: z.array(z.object({
  text: z.string(),
  finish_reason: z
@@ -143,14 +141,14 @@ const openaiCompletionStreamChunkSchema = zodSchema(z.object({
  model: z.string(),
  system_fingerprint: z.string().optional(),
  object: z.literal("text_completion"),
- }));
+ });
  export const OpenAITextResponseFormat = {
  /**
  * Returns the response as a JSON object.
  */
  json: {
  stream: false,
- handler: createJsonResponseHandler(OpenAICompletionResponseSchema),
+ handler: createJsonResponseHandler(zodSchema(OpenAICompletionResponseSchema)),
  },
  /**
  * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
@@ -158,6 +156,6 @@ export const OpenAITextResponseFormat = {
  */
  deltaIterable: {
  stream: true,
- handler: createEventSourceResponseHandler(openaiCompletionStreamChunkSchema),
+ handler: createEventSourceResponseHandler(zodSchema(openaiCompletionStreamChunkSchema)),
  },
  };
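
The request body above documents how completion settings map onto OpenAI fields (maxGenerationTokens → max_tokens, topP → top_p, numberOfGenerations → n, presencePenalty → presence_penalty, and so on). A hedged configuration sketch, assuming the openai.CompletionTextGenerator facade and a placeholder model id:

  import { openai } from "modelfusion";

  // Each setting corresponds to a request body field in the hunk above.
  const completionModel = openai.CompletionTextGenerator({
    model: "gpt-3.5-turbo-instruct", // placeholder model id
    maxGenerationTokens: 200,        // sent as max_tokens
    temperature: 0.7,
    topP: 0.9,                       // sent as top_p
    numberOfGenerations: 1,          // sent as n
  });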

package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts

@@ -1,6 +1,14 @@
  import { AbstractApiConfiguration } from "../../core/api/AbstractApiConfiguration.js";
  import { RetryFunction } from "../../core/api/RetryFunction.js";
  import { ThrottleFunction } from "../../core/api/ThrottleFunction.js";
+ export type AzureOpenAIApiConfigurationOptions = {
+ resourceName: string;
+ deploymentId: string;
+ apiVersion: string;
+ apiKey?: string;
+ retry?: RetryFunction;
+ throttle?: ThrottleFunction;
+ };
  /**
  * Configuration for the Azure OpenAI API. This class is responsible for constructing URLs specific to the Azure OpenAI deployment.
  * It creates URLs of the form
@@ -13,13 +21,6 @@ export declare class AzureOpenAIApiConfiguration extends AbstractApiConfiguratio
  readonly deploymentId: string;
  readonly apiVersion: string;
  readonly headers: Record<string, string>;
- constructor({ resourceName, deploymentId, apiVersion, apiKey, retry, throttle, }: {
- resourceName: string;
- deploymentId: string;
- apiVersion: string;
- apiKey?: string;
- retry?: RetryFunction;
- throttle?: ThrottleFunction;
- });
+ constructor({ resourceName, deploymentId, apiVersion, apiKey, retry, throttle, }: AzureOpenAIApiConfigurationOptions);
  assembleUrl(path: string): string;
  }
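
The change above extracts the constructor argument into an exported AzureOpenAIApiConfigurationOptions type. A usage sketch, assuming both names are re-exported from the package root; all values are placeholders:

  import {
    AzureOpenAIApiConfiguration,
    type AzureOpenAIApiConfigurationOptions,
  } from "modelfusion";

  const options: AzureOpenAIApiConfigurationOptions = {
    resourceName: "my-resource",   // placeholder
    deploymentId: "my-deployment", // placeholder
    apiVersion: "2023-05-15",      // placeholder API version
    // apiKey, retry, and throttle are optional per the type above
  };

  const azureApi = new AzureOpenAIApiConfiguration(options);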

package/model-provider/openai/OpenAIApiConfiguration.cjs

@@ -3,19 +3,27 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAIApiConfiguration = void 0;
  const BaseUrlApiConfiguration_js_1 = require("../../core/api/BaseUrlApiConfiguration.cjs");
  const loadApiKey_js_1 = require("../../core/api/loadApiKey.cjs");
- class OpenAIApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfiguration {
- constructor({ baseUrl = "https://api.openai.com/v1", apiKey, retry, throttle, } = {}) {
+ /**
+ * Creates an API configuration for the OpenAI API.
+ * It calls the API at https://api.openai.com/v1 and uses the `OPENAI_API_KEY` env variable by default.
+ */
+ class OpenAIApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfigurationWithDefaults {
+ constructor(settings = {}) {
  super({
- baseUrl,
+ ...settings,
  headers: {
  Authorization: `Bearer ${(0, loadApiKey_js_1.loadApiKey)({
- apiKey,
+ apiKey: settings.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI",
  })}`,
  },
- retry,
- throttle,
+ baseUrlDefaults: {
+ protocol: "https",
+ host: "api.openai.com",
+ port: "443",
+ path: "/v1",
+ },
  });
  }
  }

package/model-provider/openai/OpenAIApiConfiguration.d.ts

@@ -1,11 +1,10 @@
- import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
- import { RetryFunction } from "../../core/api/RetryFunction.js";
- import { ThrottleFunction } from "../../core/api/ThrottleFunction.js";
- export declare class OpenAIApiConfiguration extends BaseUrlApiConfiguration {
- constructor({ baseUrl, apiKey, retry, throttle, }?: {
- baseUrl?: string;
+ import { BaseUrlApiConfigurationWithDefaults, PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseUrlApiConfiguration.js";
+ /**
+ * Creates an API configuration for the OpenAI API.
+ * It calls the API at https://api.openai.com/v1 and uses the `OPENAI_API_KEY` env variable by default.
+ */
+ export declare class OpenAIApiConfiguration extends BaseUrlApiConfigurationWithDefaults {
+ constructor(settings?: PartialBaseUrlPartsApiConfigurationOptions & {
  apiKey?: string;
- retry?: RetryFunction;
- throttle?: ThrottleFunction;
  });
  }

package/model-provider/openai/OpenAIApiConfiguration.js

@@ -1,18 +1,26 @@
- import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
+ import { BaseUrlApiConfigurationWithDefaults, } from "../../core/api/BaseUrlApiConfiguration.js";
  import { loadApiKey } from "../../core/api/loadApiKey.js";
- export class OpenAIApiConfiguration extends BaseUrlApiConfiguration {
- constructor({ baseUrl = "https://api.openai.com/v1", apiKey, retry, throttle, } = {}) {
+ /**
+ * Creates an API configuration for the OpenAI API.
+ * It calls the API at https://api.openai.com/v1 and uses the `OPENAI_API_KEY` env variable by default.
+ */
+ export class OpenAIApiConfiguration extends BaseUrlApiConfigurationWithDefaults {
+ constructor(settings = {}) {
  super({
- baseUrl,
+ ...settings,
  headers: {
  Authorization: `Bearer ${loadApiKey({
- apiKey,
+ apiKey: settings.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI",
  })}`,
  },
- retry,
- throttle,
+ baseUrlDefaults: {
+ protocol: "https",
+ host: "api.openai.com",
+ port: "443",
+ path: "/v1",
+ },
  });
  }
  }
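
All three builds of OpenAIApiConfiguration now extend BaseUrlApiConfigurationWithDefaults, replacing the former baseUrl string with baseUrlDefaults parts (protocol, host, port, path). A hedged sketch of what PartialBaseUrlPartsApiConfigurationOptions appears to allow, overriding individual URL parts while keeping the remaining defaults; the shape of the baseUrl override is an assumption based on the type name:

  import { OpenAIApiConfiguration } from "modelfusion";

  // Uses the defaults: https://api.openai.com/v1 and OPENAI_API_KEY from the environment.
  const defaultApi = new OpenAIApiConfiguration();

  // Assumed partial override: point at a proxy host while keeping protocol, port, and path.
  const proxiedApi = new OpenAIApiConfiguration({
    apiKey: "sk-placeholder",
    baseUrl: { host: "openai-proxy.internal" }, // hypothetical host; override shape assumed
  });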