modelfusion 0.113.0 → 0.114.0

This diff covers the published contents of the two package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
Files changed (149)
  1. package/CHANGELOG.md +59 -0
  2. package/README.md +89 -89
  3. package/core/FunctionOptions.d.ts +14 -0
  4. package/core/api/AbstractApiConfiguration.cjs +16 -1
  5. package/core/api/AbstractApiConfiguration.d.ts +7 -3
  6. package/core/api/AbstractApiConfiguration.js +16 -1
  7. package/core/api/ApiConfiguration.d.ts +10 -1
  8. package/core/api/BaseUrlApiConfiguration.cjs +9 -5
  9. package/core/api/BaseUrlApiConfiguration.d.ts +7 -7
  10. package/core/api/BaseUrlApiConfiguration.js +9 -5
  11. package/core/api/CustomHeaderProvider.cjs +2 -0
  12. package/core/api/CustomHeaderProvider.d.ts +2 -0
  13. package/core/api/CustomHeaderProvider.js +1 -0
  14. package/core/api/index.cjs +1 -0
  15. package/core/api/index.d.ts +1 -0
  16. package/core/api/index.js +1 -0
  17. package/core/cache/Cache.cjs +2 -0
  18. package/core/cache/Cache.d.ts +12 -0
  19. package/core/cache/Cache.js +1 -0
  20. package/core/cache/MemoryCache.cjs +23 -0
  21. package/core/cache/MemoryCache.d.ts +15 -0
  22. package/core/cache/MemoryCache.js +19 -0
  23. package/core/cache/index.cjs +18 -0
  24. package/core/cache/index.d.ts +2 -0
  25. package/core/cache/index.js +2 -0
  26. package/core/index.cjs +1 -0
  27. package/core/index.d.ts +1 -0
  28. package/core/index.js +1 -0
  29. package/core/schema/TypeValidationError.cjs +36 -0
  30. package/core/schema/TypeValidationError.d.ts +15 -0
  31. package/core/schema/TypeValidationError.js +32 -0
  32. package/core/schema/index.cjs +2 -0
  33. package/core/schema/index.d.ts +2 -0
  34. package/core/schema/index.js +2 -0
  35. package/core/schema/parseJSON.cjs +6 -14
  36. package/core/schema/parseJSON.d.ts +3 -2
  37. package/core/schema/parseJSON.js +6 -14
  38. package/core/schema/validateTypes.cjs +65 -0
  39. package/core/schema/validateTypes.d.ts +34 -0
  40. package/core/schema/validateTypes.js +60 -0
  41. package/model-function/embed/EmbeddingModel.d.ts +2 -2
  42. package/model-function/executeStandardCall.cjs +3 -1
  43. package/model-function/executeStandardCall.d.ts +2 -2
  44. package/model-function/executeStandardCall.js +3 -1
  45. package/model-function/executeStreamCall.cjs +2 -1
  46. package/model-function/executeStreamCall.d.ts +2 -2
  47. package/model-function/executeStreamCall.js +2 -1
  48. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  49. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +2 -2
  50. package/model-function/generate-speech/SpeechGenerationModel.d.ts +3 -3
  51. package/model-function/generate-structure/generateStructure.cjs +4 -1
  52. package/model-function/generate-structure/generateStructure.js +4 -1
  53. package/model-function/generate-structure/streamStructure.cjs +4 -1
  54. package/model-function/generate-structure/streamStructure.js +4 -1
  55. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +3 -0
  56. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +11 -2
  57. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +3 -0
  58. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -2
  59. package/model-function/generate-text/TextGenerationModel.d.ts +12 -3
  60. package/model-function/generate-text/generateText.cjs +43 -1
  61. package/model-function/generate-text/generateText.js +43 -1
  62. package/model-function/generate-transcription/TranscriptionModel.d.ts +2 -2
  63. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +20 -8
  64. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +27 -5
  65. package/model-provider/anthropic/AnthropicTextGenerationModel.js +20 -8
  66. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +8 -3
  67. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
  68. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +8 -3
  69. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +8 -3
  70. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
  71. package/model-provider/cohere/CohereTextEmbeddingModel.js +8 -3
  72. package/model-provider/cohere/CohereTextGenerationModel.cjs +20 -8
  73. package/model-provider/cohere/CohereTextGenerationModel.d.ts +45 -5
  74. package/model-provider/cohere/CohereTextGenerationModel.js +20 -8
  75. package/model-provider/cohere/CohereTokenizer.cjs +16 -6
  76. package/model-provider/cohere/CohereTokenizer.d.ts +3 -3
  77. package/model-provider/cohere/CohereTokenizer.js +16 -6
  78. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +1 -1
  79. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +1 -1
  80. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +8 -3
  81. package/model-provider/elevenlabs/ElevenLabsSpeechModel.d.ts +2 -2
  82. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +8 -3
  83. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +8 -3
  84. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +3 -3
  85. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +8 -3
  86. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +18 -4
  87. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +21 -3
  88. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +18 -4
  89. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +20 -8
  90. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +125 -5
  91. package/model-provider/llamacpp/LlamaCppCompletionModel.js +20 -8
  92. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +8 -3
  93. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -3
  94. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +8 -3
  95. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +8 -3
  96. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +2 -2
  97. package/model-provider/llamacpp/LlamaCppTokenizer.js +8 -3
  98. package/model-provider/lmnt/LmntSpeechModel.cjs +8 -3
  99. package/model-provider/lmnt/LmntSpeechModel.d.ts +2 -2
  100. package/model-provider/lmnt/LmntSpeechModel.js +8 -3
  101. package/model-provider/mistral/MistralChatModel.cjs +20 -8
  102. package/model-provider/mistral/MistralChatModel.d.ts +55 -5
  103. package/model-provider/mistral/MistralChatModel.js +20 -8
  104. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +8 -3
  105. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +3 -3
  106. package/model-provider/mistral/MistralTextEmbeddingModel.js +8 -3
  107. package/model-provider/ollama/OllamaChatModel.cjs +20 -8
  108. package/model-provider/ollama/OllamaChatModel.d.ts +27 -5
  109. package/model-provider/ollama/OllamaChatModel.js +20 -8
  110. package/model-provider/ollama/OllamaCompletionModel.cjs +20 -7
  111. package/model-provider/ollama/OllamaCompletionModel.d.ts +43 -5
  112. package/model-provider/ollama/OllamaCompletionModel.js +20 -7
  113. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +8 -3
  114. package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +3 -3
  115. package/model-provider/ollama/OllamaTextEmbeddingModel.js +8 -3
  116. package/model-provider/openai/AbstractOpenAIChatModel.cjs +23 -13
  117. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +94 -7
  118. package/model-provider/openai/AbstractOpenAIChatModel.js +23 -13
  119. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +21 -9
  120. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +35 -5
  121. package/model-provider/openai/AbstractOpenAICompletionModel.js +21 -9
  122. package/model-provider/openai/AzureOpenAIApiConfiguration.cjs +5 -2
  123. package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +2 -1
  124. package/model-provider/openai/AzureOpenAIApiConfiguration.js +5 -2
  125. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.cjs +12 -6
  126. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +89 -5
  127. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.js +12 -6
  128. package/model-provider/openai/OpenAIImageGenerationModel.cjs +10 -6
  129. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +4 -4
  130. package/model-provider/openai/OpenAIImageGenerationModel.js +10 -6
  131. package/model-provider/openai/OpenAISpeechModel.cjs +9 -4
  132. package/model-provider/openai/OpenAISpeechModel.d.ts +3 -3
  133. package/model-provider/openai/OpenAISpeechModel.js +9 -4
  134. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +11 -6
  135. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +3 -3
  136. package/model-provider/openai/OpenAITextEmbeddingModel.js +11 -6
  137. package/model-provider/openai/OpenAITranscriptionModel.cjs +9 -6
  138. package/model-provider/openai/OpenAITranscriptionModel.d.ts +4 -4
  139. package/model-provider/openai/OpenAITranscriptionModel.js +9 -6
  140. package/model-provider/stability/StabilityImageGenerationModel.cjs +10 -5
  141. package/model-provider/stability/StabilityImageGenerationModel.d.ts +3 -3
  142. package/model-provider/stability/StabilityImageGenerationModel.js +10 -5
  143. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +9 -7
  144. package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +3 -3
  145. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +9 -7
  146. package/observability/helicone/HeliconeOpenAIApiConfiguration.cjs +2 -1
  147. package/observability/helicone/HeliconeOpenAIApiConfiguration.d.ts +3 -1
  148. package/observability/helicone/HeliconeOpenAIApiConfiguration.js +2 -1
  149. package/package.json +1 -1
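
Two themes run through the hunks that follow: model methods now take a required `FunctionCallOptions` argument (carrying `functionType`, `functionId`, `run`, and `callId`) instead of an optional `FunctionOptions`, and `ApiConfiguration.headers` changes from a plain record into a function of that per-call context. A minimal sketch of the shapes involved, with hypothetical local names standing in for the library's own types:

```ts
// Hypothetical, simplified shapes inferred from the hunks below; the real
// definitions live in core/FunctionOptions and core/api/ApiConfiguration.
interface RunLike {
  userId?: string;
  abortSignal?: AbortSignal;
}

interface CallContext {
  functionType: string; // e.g. "generate-text"
  functionId?: string;  // optional caller-assigned identifier
  callId: string;       // unique id of this model call
  run?: RunLike;        // the surrounding run, if any
}

interface HeaderResolvingApiConfiguration {
  assembleUrl(path: string): string;
  // Previously `headers: Record<string, string>`; now resolved per call:
  headers(context: CallContext): Record<string, string | undefined>;
}
```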
package/model-provider/openai/AbstractOpenAIChatModel.d.ts

@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
@@ -82,15 +82,14 @@ export type OpenAIChatPrompt = OpenAIChatMessage[];
  */
  export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractOpenAIChatSettings> extends AbstractModel<SETTINGS> {
  constructor(settings: SETTINGS);
- callAPI<RESULT>(messages: OpenAIChatPrompt, options: {
+ callAPI<RESULT>(messages: OpenAIChatPrompt, callOptions: FunctionCallOptions, options: {
  responseFormat: OpenAIChatResponseFormatType<RESULT>;
- } & FunctionOptions & {
  functions?: AbstractOpenAIChatSettings["functions"];
  functionCall?: AbstractOpenAIChatSettings["functionCall"];
  tools?: AbstractOpenAIChatSettings["tools"];
  toolChoice?: AbstractOpenAIChatSettings["toolChoice"];
  }): Promise<RESULT>;
- doGenerateTexts(prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<{
+ doGenerateTexts(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
  response: {
  object: "chat.completion";
  model: string;
@@ -134,8 +133,96 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  totalTokens: number;
  };
  }>;
+ restoreGeneratedTexts(rawResponse: unknown): {
+ response: {
+ object: "chat.completion";
+ model: string;
+ usage: {
+ prompt_tokens: number;
+ completion_tokens: number;
+ total_tokens: number;
+ };
+ id: string;
+ created: number;
+ choices: {
+ message: {
+ role: "assistant";
+ content: string | null;
+ function_call?: {
+ name: string;
+ arguments: string;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ logprobs?: any;
+ finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+ }[];
+ system_fingerprint?: string | null | undefined;
+ };
+ textGenerationResults: {
+ text: string;
+ finishReason: TextGenerationFinishReason;
+ }[];
+ usage: {
+ promptTokens: number;
+ completionTokens: number;
+ totalTokens: number;
+ };
+ };
+ processTextGenerationResponse(response: OpenAIChatResponse): {
+ response: {
+ object: "chat.completion";
+ model: string;
+ usage: {
+ prompt_tokens: number;
+ completion_tokens: number;
+ total_tokens: number;
+ };
+ id: string;
+ created: number;
+ choices: {
+ message: {
+ role: "assistant";
+ content: string | null;
+ function_call?: {
+ name: string;
+ arguments: string;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ logprobs?: any;
+ finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+ }[];
+ system_fingerprint?: string | null | undefined;
+ };
+ textGenerationResults: {
+ text: string;
+ finishReason: TextGenerationFinishReason;
+ }[];
+ usage: {
+ promptTokens: number;
+ completionTokens: number;
+ totalTokens: number;
+ };
+ };
  private translateFinishReason;
- doStreamText(prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+ doStreamText(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
  object: "chat.completion.chunk";
  model: string;
  id: string;
@@ -163,7 +250,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  system_fingerprint?: string | null | undefined;
  }>>>;
  extractTextDelta(delta: unknown): string | undefined;
- doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<{
+ doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
  response: {
  object: "chat.completion";
  model: string;
@@ -207,7 +294,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  totalTokens: number;
  };
  }>;
- doGenerateToolCalls(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<{
+ doGenerateToolCalls(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
  response: {
  object: "chat.completion";
  model: string;
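
The newly declared `restoreGeneratedTexts` accepts an `unknown` raw response and returns the same shape as `doGenerateTexts`, which suggests it exists so a previously captured provider response (for example from a cache) can be turned back into text generation results without a network call. A hedged illustration of that contract with a stand-in object; this is not the real class, and the stored response literal is fabricated:

```ts
// Stand-in with the same method shape as the declaration above; the real
// implementation validates the raw body against the OpenAI chat response
// schema before mapping it (see the .js/.cjs hunks further down).
type TextResult = { text: string; finishReason: string };

const chatModelStandIn = {
  restoreGeneratedTexts(rawResponse: unknown): { textGenerationResults: TextResult[] } {
    const parsed = rawResponse as {
      choices: { message: { content: string | null }; finish_reason?: string }[];
    };
    return {
      textGenerationResults: parsed.choices.map((choice) => ({
        text: choice.message.content ?? "",
        finishReason: choice.finish_reason ?? "unknown",
      })),
    };
  },
};

// A raw /chat/completions body captured earlier, e.g. by a caching layer:
const storedResponse: unknown = {
  choices: [{ message: { content: "Hello!" }, finish_reason: "stop" }],
};

console.log(chatModelStandIn.restoreGeneratedTexts(storedResponse).textGenerationResults);
```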
package/model-provider/openai/AbstractOpenAIChatModel.js

@@ -3,6 +3,7 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { parseJSON } from "../../core/schema/parseJSON.js";
+ import { validateTypes } from "../../core/schema/validateTypes.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -16,12 +17,12 @@ export class AbstractOpenAIChatModel extends AbstractModel {
  constructor(settings) {
  super({ settings });
  }
- async callAPI(messages, options) {
+ async callAPI(messages, callOptions, options) {
  const api = this.settings.api ?? new OpenAIApiConfiguration();
  const responseFormat = options.responseFormat;
- const abortSignal = options.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  const user = this.settings.isUserIdForwardingEnabled
- ? options.run?.userId
+ ? callOptions.run?.userId
  : undefined;
  const openAIResponseFormat = this.settings.responseFormat;
  // function & tool calling:
@@ -42,7 +43,12 @@ export class AbstractOpenAIChatModel extends AbstractModel {
  }
  return postJsonToApi({
  url: api.assembleUrl("/chat/completions"),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  stream: responseFormat.stream,
  model: this.settings.model,
@@ -71,10 +77,17 @@ export class AbstractOpenAIChatModel extends AbstractModel {
  });
  }
  async doGenerateTexts(prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
  responseFormat: OpenAIChatResponseFormat.json,
- });
+ }));
+ }
+ restoreGeneratedTexts(rawResponse) {
+ return this.processTextGenerationResponse(validateTypes({
+ structure: rawResponse,
+ schema: zodSchema(openAIChatResponseSchema),
+ }));
+ }
+ processTextGenerationResponse(response) {
  return {
  response,
  textGenerationResults: response.choices.map((choice) => ({
@@ -100,8 +113,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
  }
  }
  doStreamText(prompt, options) {
- return this.callAPI(prompt, {
- ...options,
+ return this.callAPI(prompt, options, {
  responseFormat: OpenAIChatResponseFormat.deltaIterable,
  });
  }
@@ -118,8 +130,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
  return firstChoice.delta.content ?? undefined;
  }
  async doGenerateToolCall(tool, prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ const response = await this.callAPI(prompt, options, {
  responseFormat: OpenAIChatResponseFormat.json,
  toolChoice: {
  type: "function",
@@ -149,8 +160,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
  };
  }
  async doGenerateToolCalls(tools, prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ const response = await this.callAPI(prompt, options, {
  responseFormat: OpenAIChatResponseFormat.json,
  toolChoice: "auto",
  tools: tools.map((tool) => ({
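
Note how the per-call metadata is now threaded into the HTTP layer: `api.headers(...)` receives the function type, function id, run, and call id, so an API configuration can attach request-specific headers (for example for observability proxies). A minimal sketch of a configuration that uses this; the class and header names are invented for illustration:

```ts
interface HeaderCallContext {
  functionType: string;
  functionId?: string;
  callId: string;
  run?: { userId?: string };
}

// Invented example configuration: a static auth header plus per-call trace
// headers derived from the context passed in by callAPI.
class TracingApiConfiguration {
  constructor(private readonly apiKey: string) {}

  assembleUrl(path: string): string {
    return `https://api.openai.com/v1${path}`;
  }

  headers(context: HeaderCallContext): Record<string, string | undefined> {
    return {
      Authorization: `Bearer ${this.apiKey}`,
      // Invented header names; a real setup might forward these to an
      // observability proxy instead.
      "X-Function-Type": context.functionType,
      "X-Function-Id": context.functionId,
      "X-Call-Id": context.callId,
      "X-User-Id": context.run?.userId,
    };
  }
}

const tracingApi = new TracingApiConfiguration("sk-...");
console.log(tracingApi.headers({ functionType: "generate-text", callId: "call-1" }));
```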
package/model-provider/openai/AbstractOpenAICompletionModel.cjs

@@ -5,6 +5,7 @@ const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
+ const validateTypes_js_1 = require("../../core/schema/validateTypes.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
  const createEventSourceResponseHandler_js_1 = require("../../util/streaming/createEventSourceResponseHandler.cjs");
  const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
@@ -18,12 +19,12 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
  constructor(settings) {
  super({ settings });
  }
- async callAPI(prompt, options) {
+ async callAPI(prompt, callOptions, options) {
  const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
  const user = this.settings.isUserIdForwardingEnabled
- ? options.run?.userId
+ ? callOptions.run?.userId
  : undefined;
- const abortSignal = options.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  const openaiResponseFormat = options.responseFormat;
  // empty arrays are not allowed for stop:
  const stopSequences = this.settings.stopSequences != null &&
@@ -36,7 +37,12 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
  throttle: api.throttle,
  call: async () => (0, postToApi_js_1.postJsonToApi)({
  url: api.assembleUrl("/completions"),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  stream: openaiResponseFormat.stream,
  model: this.settings.model,
@@ -63,10 +69,17 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
  });
  }
  async doGenerateTexts(prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
  responseFormat: exports.OpenAITextResponseFormat.json,
- });
+ }));
+ }
+ restoreGeneratedTexts(rawResponse) {
+ return this.processTextGenerationResponse((0, validateTypes_js_1.validateTypes)({
+ structure: rawResponse,
+ schema: (0, ZodSchema_js_1.zodSchema)(OpenAICompletionResponseSchema),
+ }));
+ }
+ processTextGenerationResponse(response) {
  return {
  response,
  textGenerationResults: response.choices.map((choice) => {
@@ -95,8 +108,7 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
  }
  }
  doStreamText(prompt, options) {
- return this.callAPI(prompt, {
- ...options,
+ return this.callAPI(prompt, options, {
  responseFormat: exports.OpenAITextResponseFormat.deltaIterable,
  });
  }
package/model-provider/openai/AbstractOpenAICompletionModel.d.ts

@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
@@ -27,10 +27,10 @@ export interface AbstractOpenAICompletionModelSettings extends TextGenerationMod
  */
  export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends AbstractOpenAICompletionModelSettings> extends AbstractModel<SETTINGS> {
  constructor(settings: SETTINGS);
- callAPI<RESULT>(prompt: string, options: {
+ callAPI<RESULT>(prompt: string, callOptions: FunctionCallOptions, options: {
  responseFormat: OpenAITextResponseFormatType<RESULT>;
- } & FunctionOptions): Promise<RESULT>;
- doGenerateTexts(prompt: string, options?: FunctionOptions): Promise<{
+ }): Promise<RESULT>;
+ doGenerateTexts(prompt: string, options: FunctionCallOptions): Promise<{
  response: {
  object: "text_completion";
  model: string;
@@ -59,8 +59,38 @@ export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends Abs
  totalTokens: number;
  };
  }>;
+ restoreGeneratedTexts(rawResponse: unknown): {
+ response: {
+ object: "text_completion";
+ model: string;
+ usage: {
+ prompt_tokens: number;
+ completion_tokens: number;
+ total_tokens: number;
+ };
+ id: string;
+ created: number;
+ choices: {
+ text: string;
+ index: number;
+ finish_reason?: "length" | "stop" | "content_filter" | null | undefined;
+ logprobs?: any;
+ }[];
+ system_fingerprint?: string | undefined;
+ };
+ textGenerationResults: {
+ finishReason: TextGenerationFinishReason;
+ text: string;
+ }[];
+ usage: {
+ promptTokens: number;
+ completionTokens: number;
+ totalTokens: number;
+ };
+ };
+ private processTextGenerationResponse;
  private translateFinishReason;
- doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+ doStreamText(prompt: string, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
  object: "text_completion";
  model: string;
  id: string;
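
`restoreGeneratedTexts` runs the unknown input through the new `validateTypes` helper (`{ structure, schema }`) before reusing the normal response mapping. A minimal sketch of that idea written directly against Zod; it mirrors the call shape seen in these hunks but is not the library's implementation:

```ts
import { z } from "zod";

// Sketch of the validateTypes idea: check an unknown value against a schema
// and either return it fully typed or throw a descriptive error. The real
// helper wraps failures in a TypeValidationError (see
// core/schema/TypeValidationError in the file list above).
function validateTypesSketch<T>({
  structure,
  schema,
}: {
  structure: unknown;
  schema: z.ZodType<T>;
}): T {
  const result = schema.safeParse(structure);
  if (!result.success) {
    throw new Error(`Type validation failed: ${result.error.message}`);
  }
  return result.data;
}

const completionResponseSchema = z.object({
  object: z.literal("text_completion"),
  choices: z.array(z.object({ text: z.string(), index: z.number() })),
});

// A raw body captured earlier; validating it makes restoring it safe:
const raw: unknown = {
  object: "text_completion",
  choices: [{ text: "Hi", index: 0 }],
};

const validated = validateTypesSketch({ structure: raw, schema: completionResponseSchema });
console.log(validated.choices[0].text);
```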
package/model-provider/openai/AbstractOpenAICompletionModel.js

@@ -2,6 +2,7 @@ import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { zodSchema } from "../../core/schema/ZodSchema.js";
+ import { validateTypes } from "../../core/schema/validateTypes.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -15,12 +16,12 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
  constructor(settings) {
  super({ settings });
  }
- async callAPI(prompt, options) {
+ async callAPI(prompt, callOptions, options) {
  const api = this.settings.api ?? new OpenAIApiConfiguration();
  const user = this.settings.isUserIdForwardingEnabled
- ? options.run?.userId
+ ? callOptions.run?.userId
  : undefined;
- const abortSignal = options.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  const openaiResponseFormat = options.responseFormat;
  // empty arrays are not allowed for stop:
  const stopSequences = this.settings.stopSequences != null &&
@@ -33,7 +34,12 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl("/completions"),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  stream: openaiResponseFormat.stream,
  model: this.settings.model,
@@ -60,10 +66,17 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
  });
  }
  async doGenerateTexts(prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
  responseFormat: OpenAITextResponseFormat.json,
- });
+ }));
+ }
+ restoreGeneratedTexts(rawResponse) {
+ return this.processTextGenerationResponse(validateTypes({
+ structure: rawResponse,
+ schema: zodSchema(OpenAICompletionResponseSchema),
+ }));
+ }
+ processTextGenerationResponse(response) {
  return {
  response,
  textGenerationResults: response.choices.map((choice) => {
@@ -92,8 +105,7 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
  }
  }
  doStreamText(prompt, options) {
- return this.callAPI(prompt, {
- ...options,
+ return this.callAPI(prompt, options, {
  responseFormat: OpenAITextResponseFormat.deltaIterable,
  });
  }
package/model-provider/openai/AzureOpenAIApiConfiguration.cjs

@@ -31,7 +31,7 @@ class AzureOpenAIApiConfiguration extends AbstractApiConfiguration_js_1.Abstract
  writable: true,
  value: void 0
  });
- Object.defineProperty(this, "headers", {
+ Object.defineProperty(this, "fixedHeaderValue", {
  enumerable: true,
  configurable: true,
  writable: true,
@@ -40,7 +40,7 @@
  this.resourceName = resourceName;
  this.deploymentId = deploymentId;
  this.apiVersion = apiVersion;
- this.headers = {
+ this.fixedHeaderValue = {
  "api-key": (0, loadApiKey_js_1.loadApiKey)({
  apiKey,
  environmentVariableName: "AZURE_OPENAI_API_KEY",
@@ -51,5 +51,8 @@
  assembleUrl(path) {
  return `https://${this.resourceName}.openai.azure.com/openai/deployments/${this.deploymentId}${path}?api-version=${this.apiVersion}`;
  }
+ fixedHeaders() {
+ return this.fixedHeaderValue;
+ }
  }
  exports.AzureOpenAIApiConfiguration = AzureOpenAIApiConfiguration;
package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts

@@ -20,7 +20,8 @@ export declare class AzureOpenAIApiConfiguration extends AbstractApiConfiguratio
  readonly resourceName: string;
  readonly deploymentId: string;
  readonly apiVersion: string;
- readonly headers: Record<string, string>;
+ readonly fixedHeaderValue: Record<string, string>;
  constructor({ resourceName, deploymentId, apiVersion, apiKey, retry, throttle, }: AzureOpenAIApiConfigurationOptions);
  assembleUrl(path: string): string;
+ fixedHeaders(): Record<string, string>;
  }
package/model-provider/openai/AzureOpenAIApiConfiguration.js

@@ -28,7 +28,7 @@ export class AzureOpenAIApiConfiguration extends AbstractApiConfiguration {
  writable: true,
  value: void 0
  });
- Object.defineProperty(this, "headers", {
+ Object.defineProperty(this, "fixedHeaderValue", {
  enumerable: true,
  configurable: true,
  writable: true,
@@ -37,7 +37,7 @@ export class AzureOpenAIApiConfiguration extends AbstractApiConfiguration {
  this.resourceName = resourceName;
  this.deploymentId = deploymentId;
  this.apiVersion = apiVersion;
- this.headers = {
+ this.fixedHeaderValue = {
  "api-key": loadApiKey({
  apiKey,
  environmentVariableName: "AZURE_OPENAI_API_KEY",
@@ -48,4 +48,7 @@ export class AzureOpenAIApiConfiguration extends AbstractApiConfiguration {
  assembleUrl(path) {
  return `https://${this.resourceName}.openai.azure.com/openai/deployments/${this.deploymentId}${path}?api-version=${this.apiVersion}`;
  }
+ fixedHeaders() {
+ return this.fixedHeaderValue;
+ }
  }
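
The Azure configuration stops exposing a `headers` record and instead implements `fixedHeaders()`, which implies the abstract base now composes the final headers from the static part plus anything derived from the call context. A hedged sketch of that composition; the base-class internals shown here are an assumption, not the library's code:

```ts
type CallContext = { functionType: string; functionId?: string; callId: string };
type HeaderMap = Record<string, string | undefined>;

// Assumed split: subclasses supply static headers via fixedHeaders(), and the
// base class merges in per-call headers (e.g. from a custom header provider,
// cf. core/api/CustomHeaderProvider in the file list) when headers() is called.
abstract class HeaderComposingConfiguration {
  constructor(
    private readonly perCallHeaders: (context: CallContext) => HeaderMap = () => ({})
  ) {}

  abstract assembleUrl(path: string): string;
  abstract fixedHeaders(): HeaderMap;

  headers(context: CallContext): HeaderMap {
    return { ...this.fixedHeaders(), ...this.perCallHeaders(context) };
  }
}

class AzureLikeConfiguration extends HeaderComposingConfiguration {
  constructor(private readonly apiKey: string) {
    super();
  }
  assembleUrl(path: string): string {
    return `https://my-resource.openai.azure.com/openai/deployments/my-deployment${path}`;
  }
  fixedHeaders(): HeaderMap {
    return { "api-key": this.apiKey };
  }
}

const azureLike = new AzureLikeConfiguration("AZURE_OPENAI_API_KEY value");
console.log(azureLike.headers({ functionType: "generate-text", callId: "call-42" }));
```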
package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.cjs

@@ -44,10 +44,18 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  return this.model.modelInformation;
  }
  get settings() {
- return this.model.settings;
+ return {
+ ...this.model.settings,
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ };
  }
  get settingsForEvent() {
- return this.model.settingsForEvent;
+ return {
+ ...this.model.settingsForEvent,
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ };
  }
  /**
  * Returns this model with a text prompt template.
@@ -100,8 +108,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  ...this.promptTemplate.stopSequences,
  ],
  })
- .callAPI(expandedPrompt, {
- ...options,
+ .callAPI(expandedPrompt, options, {
  responseFormat: AbstractOpenAIChatModel_js_1.OpenAIChatResponseFormat.json,
  functionCall: { name: this.fnName },
  functions: [
@@ -131,8 +138,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  async doStreamStructure(schema, prompt, // first argument of the function
  options) {
  const expandedPrompt = this.promptTemplate.format(prompt);
- return this.model.callAPI(expandedPrompt, {
- ...options,
+ return this.model.callAPI(expandedPrompt, options, {
  responseFormat: AbstractOpenAIChatModel_js_1.OpenAIChatResponseFormat.deltaIterable,
  functionCall: { name: this.fnName },
  functions: [
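
Both structure-generation paths above force a single function call (`functionCall: { name: this.fnName }`), so the generated structure comes back as that function's JSON-encoded `arguments` string. A small illustration of the parsing step this enables; the response fragment below is fabricated:

```ts
import { z } from "zod";

// Fabricated fragment of an OpenAI chat completion choice where a single
// function call was forced.
const choice = {
  message: {
    role: "assistant",
    content: null,
    function_call: { name: "extractCity", arguments: '{"city":"Berlin","country":"DE"}' },
  },
};

const citySchema = z.object({ city: z.string(), country: z.string() });

// Because the call was forced, the structure is simply the parsed arguments,
// validated against the caller-supplied schema.
const structure = citySchema.parse(JSON.parse(choice.message.function_call.arguments));
console.log(structure.city); // "Berlin"
```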
package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts

@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
  import { Schema } from "../../core/schema/Schema.js";
  import { StructureStreamingModel } from "../../model-function/generate-structure/StructureGenerationModel.js";
@@ -18,8 +18,92 @@ OpenAIChatSettings> {
  promptTemplate: PROMPT_TEMPLATE;
  });
  get modelInformation(): import("../../index.js").ModelInformation;
- get settings(): OpenAIChatSettings;
- get settingsForEvent(): Partial<OpenAIChatSettings>;
+ get settings(): {
+ fnName: string;
+ fnDescription: string | undefined;
+ model: import("./OpenAIChatModel.js").OpenAIChatModelType;
+ api?: import("../../index.js").ApiConfiguration | undefined;
+ functions?: {
+ name: string;
+ description?: string | undefined;
+ parameters: unknown;
+ }[] | undefined;
+ functionCall?: "auto" | {
+ name: string;
+ } | "none" | undefined;
+ tools?: {
+ type: "function";
+ function: {
+ name: string;
+ description?: string | undefined;
+ parameters: unknown;
+ };
+ }[] | undefined;
+ toolChoice?: "auto" | "none" | {
+ type: "function";
+ function: {
+ name: string;
+ };
+ } | undefined;
+ temperature?: number | undefined;
+ topP?: number | undefined;
+ seed?: number | null | undefined;
+ presencePenalty?: number | undefined;
+ frequencyPenalty?: number | undefined;
+ responseFormat?: {
+ type?: "text" | "json_object" | undefined;
+ } | undefined;
+ logitBias?: Record<number, number> | undefined;
+ isUserIdForwardingEnabled?: boolean | undefined;
+ maxGenerationTokens?: number | undefined;
+ stopSequences?: string[] | undefined;
+ numberOfGenerations?: number | undefined;
+ trimWhitespace?: boolean | undefined;
+ observers?: import("../../index.js").FunctionObserver[] | undefined;
+ };
+ get settingsForEvent(): {
+ fnName: string;
+ fnDescription: string | undefined;
+ model?: import("./OpenAIChatModel.js").OpenAIChatModelType | undefined;
+ api?: import("../../index.js").ApiConfiguration | undefined;
+ functions?: {
+ name: string;
+ description?: string | undefined;
+ parameters: unknown;
+ }[] | undefined;
+ functionCall?: "auto" | {
+ name: string;
+ } | "none" | undefined;
+ tools?: {
+ type: "function";
+ function: {
+ name: string;
+ description?: string | undefined;
+ parameters: unknown;
+ };
+ }[] | undefined;
+ toolChoice?: "auto" | "none" | {
+ type: "function";
+ function: {
+ name: string;
+ };
+ } | undefined;
+ temperature?: number | undefined;
+ topP?: number | undefined;
+ seed?: number | null | undefined;
+ presencePenalty?: number | undefined;
+ frequencyPenalty?: number | undefined;
+ responseFormat?: {
+ type?: "text" | "json_object" | undefined;
+ } | undefined;
+ logitBias?: Record<number, number> | undefined;
+ isUserIdForwardingEnabled?: boolean | undefined;
+ maxGenerationTokens?: number | undefined;
+ stopSequences?: string[] | undefined;
+ numberOfGenerations?: number | undefined;
+ trimWhitespace?: boolean | undefined;
+ observers?: import("../../index.js").FunctionObserver[] | undefined;
+ };
  /**
  * Returns this model with a text prompt template.
  */
@@ -42,7 +126,7 @@ OpenAIChatSettings> {
  * @see https://platform.openai.com/docs/guides/gpt/function-calling
  */
  doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
- options?: FunctionOptions): Promise<{
+ options: FunctionCallOptions): Promise<{
  response: {
  object: "chat.completion";
  model: string;
@@ -85,7 +169,7 @@ OpenAIChatSettings> {
  };
  }>;
  doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
- options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+ options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
  object: "chat.completion.chunk";
  model: string;
  id: string;