modelfusion 0.113.0 → 0.114.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +65 -0
- package/README.md +89 -89
- package/core/FunctionOptions.d.ts +14 -0
- package/core/api/AbstractApiConfiguration.cjs +16 -1
- package/core/api/AbstractApiConfiguration.d.ts +7 -3
- package/core/api/AbstractApiConfiguration.js +16 -1
- package/core/api/ApiConfiguration.d.ts +10 -1
- package/core/api/BaseUrlApiConfiguration.cjs +9 -5
- package/core/api/BaseUrlApiConfiguration.d.ts +7 -7
- package/core/api/BaseUrlApiConfiguration.js +9 -5
- package/core/api/CustomHeaderProvider.cjs +2 -0
- package/core/api/CustomHeaderProvider.d.ts +2 -0
- package/core/api/CustomHeaderProvider.js +1 -0
- package/core/api/index.cjs +1 -0
- package/core/api/index.d.ts +1 -0
- package/core/api/index.js +1 -0
- package/core/cache/Cache.cjs +2 -0
- package/core/cache/Cache.d.ts +12 -0
- package/core/cache/Cache.js +1 -0
- package/core/cache/MemoryCache.cjs +23 -0
- package/core/cache/MemoryCache.d.ts +15 -0
- package/core/cache/MemoryCache.js +19 -0
- package/core/cache/index.cjs +18 -0
- package/core/cache/index.d.ts +2 -0
- package/core/cache/index.js +2 -0
- package/core/index.cjs +1 -0
- package/core/index.d.ts +1 -0
- package/core/index.js +1 -0
- package/core/schema/TypeValidationError.cjs +36 -0
- package/core/schema/TypeValidationError.d.ts +15 -0
- package/core/schema/TypeValidationError.js +32 -0
- package/core/schema/index.cjs +2 -0
- package/core/schema/index.d.ts +2 -0
- package/core/schema/index.js +2 -0
- package/core/schema/parseJSON.cjs +6 -14
- package/core/schema/parseJSON.d.ts +3 -2
- package/core/schema/parseJSON.js +6 -14
- package/core/schema/validateTypes.cjs +65 -0
- package/core/schema/validateTypes.d.ts +34 -0
- package/core/schema/validateTypes.js +60 -0
- package/model-function/embed/EmbeddingModel.d.ts +2 -2
- package/model-function/executeStandardCall.cjs +3 -1
- package/model-function/executeStandardCall.d.ts +2 -2
- package/model-function/executeStandardCall.js +3 -1
- package/model-function/executeStreamCall.cjs +2 -1
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +2 -1
- package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
- package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +2 -2
- package/model-function/generate-speech/SpeechGenerationModel.d.ts +3 -3
- package/model-function/generate-structure/generateStructure.cjs +4 -1
- package/model-function/generate-structure/generateStructure.js +4 -1
- package/model-function/generate-structure/streamStructure.cjs +4 -1
- package/model-function/generate-structure/streamStructure.js +4 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +11 -2
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -2
- package/model-function/generate-text/TextGenerationModel.d.ts +12 -3
- package/model-function/generate-text/generateText.cjs +43 -1
- package/model-function/generate-text/generateText.js +43 -1
- package/model-function/generate-transcription/TranscriptionModel.d.ts +2 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +20 -8
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +27 -5
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +20 -8
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +8 -3
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +8 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +8 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.js +8 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +20 -8
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +45 -5
- package/model-provider/cohere/CohereTextGenerationModel.js +20 -8
- package/model-provider/cohere/CohereTokenizer.cjs +16 -6
- package/model-provider/cohere/CohereTokenizer.d.ts +3 -3
- package/model-provider/cohere/CohereTokenizer.js +16 -6
- package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +1 -1
- package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +1 -1
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +8 -3
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.d.ts +2 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +8 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +8 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +3 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +8 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +18 -4
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +21 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +18 -4
- package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +20 -8
- package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +125 -5
- package/model-provider/llamacpp/LlamaCppCompletionModel.js +20 -8
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +8 -3
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -3
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +8 -3
- package/model-provider/llamacpp/LlamaCppTokenizer.cjs +8 -3
- package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +2 -2
- package/model-provider/llamacpp/LlamaCppTokenizer.js +8 -3
- package/model-provider/lmnt/LmntSpeechModel.cjs +8 -3
- package/model-provider/lmnt/LmntSpeechModel.d.ts +2 -2
- package/model-provider/lmnt/LmntSpeechModel.js +8 -3
- package/model-provider/mistral/MistralChatModel.cjs +20 -8
- package/model-provider/mistral/MistralChatModel.d.ts +55 -5
- package/model-provider/mistral/MistralChatModel.js +20 -8
- package/model-provider/mistral/MistralTextEmbeddingModel.cjs +8 -3
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +3 -3
- package/model-provider/mistral/MistralTextEmbeddingModel.js +8 -3
- package/model-provider/ollama/OllamaChatModel.cjs +20 -8
- package/model-provider/ollama/OllamaChatModel.d.ts +27 -5
- package/model-provider/ollama/OllamaChatModel.js +20 -8
- package/model-provider/ollama/OllamaCompletionModel.cjs +20 -7
- package/model-provider/ollama/OllamaCompletionModel.d.ts +43 -5
- package/model-provider/ollama/OllamaCompletionModel.js +20 -7
- package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +8 -3
- package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +3 -3
- package/model-provider/ollama/OllamaTextEmbeddingModel.js +8 -3
- package/model-provider/openai/AbstractOpenAIChatModel.cjs +25 -15
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +108 -21
- package/model-provider/openai/AbstractOpenAIChatModel.js +25 -15
- package/model-provider/openai/AbstractOpenAICompletionModel.cjs +21 -9
- package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +35 -5
- package/model-provider/openai/AbstractOpenAICompletionModel.js +21 -9
- package/model-provider/openai/AzureOpenAIApiConfiguration.cjs +5 -2
- package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +2 -1
- package/model-provider/openai/AzureOpenAIApiConfiguration.js +5 -2
- package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.cjs +12 -6
- package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +91 -7
- package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.js +12 -6
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +10 -6
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +4 -4
- package/model-provider/openai/OpenAIImageGenerationModel.js +10 -6
- package/model-provider/openai/OpenAISpeechModel.cjs +9 -4
- package/model-provider/openai/OpenAISpeechModel.d.ts +3 -3
- package/model-provider/openai/OpenAISpeechModel.js +9 -4
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +11 -6
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +3 -3
- package/model-provider/openai/OpenAITextEmbeddingModel.js +11 -6
- package/model-provider/openai/OpenAITranscriptionModel.cjs +9 -6
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +4 -4
- package/model-provider/openai/OpenAITranscriptionModel.js +9 -6
- package/model-provider/stability/StabilityImageGenerationModel.cjs +10 -5
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +3 -3
- package/model-provider/stability/StabilityImageGenerationModel.js +10 -5
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +9 -7
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +3 -3
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +9 -7
- package/observability/helicone/HeliconeOpenAIApiConfiguration.cjs +2 -1
- package/observability/helicone/HeliconeOpenAIApiConfiguration.d.ts +3 -1
- package/observability/helicone/HeliconeOpenAIApiConfiguration.js +2 -1
- package/package.json +1 -1
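Several of the changes listed above (the new `core/schema/validateTypes` and `core/schema/TypeValidationError` modules) show up in the OpenAI model diffs below, where the new `restoreGeneratedTexts` methods re-validate a raw response via `validateTypes({ structure, schema })` and `zodSchema(...)`. A minimal sketch of that call pattern, assuming the helpers are re-exported from the package root and using a hypothetical `exampleSchema`; only the argument shape is taken from the diff, and the error thrown on mismatch is presumably the new `TypeValidationError`, which is not shown here:

```ts
import { z } from "zod";
// Assumed root re-exports; the diffed files import these from
// "core/schema/validateTypes.js" and "core/schema/ZodSchema.js".
import { validateTypes, zodSchema } from "modelfusion";

// Hypothetical schema for illustration only.
const exampleSchema = z.object({
  id: z.string(),
  choices: z.array(z.object({ text: z.string() })),
});

// Re-validate an untyped value (e.g. a stored raw API response) before use.
function parseStoredResponse(rawResponse: unknown) {
  return validateTypes({
    structure: rawResponse,           // the unknown value to check
    schema: zodSchema(exampleSchema), // wrapped zod schema, as in the diffs
  });
}
```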
package/model-provider/openai/AbstractOpenAIChatModel.d.ts

@@ -1,5 +1,5 @@
 import { z } from "zod";
-import {
+import { FunctionCallOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
@@ -82,15 +82,14 @@ export type OpenAIChatPrompt = OpenAIChatMessage[];
 */
 export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractOpenAIChatSettings> extends AbstractModel<SETTINGS> {
 constructor(settings: SETTINGS);
-callAPI<RESULT>(messages: OpenAIChatPrompt, options: {
+callAPI<RESULT>(messages: OpenAIChatPrompt, callOptions: FunctionCallOptions, options: {
 responseFormat: OpenAIChatResponseFormatType<RESULT>;
-} & FunctionOptions & {
 functions?: AbstractOpenAIChatSettings["functions"];
 functionCall?: AbstractOpenAIChatSettings["functionCall"];
 tools?: AbstractOpenAIChatSettings["tools"];
 toolChoice?: AbstractOpenAIChatSettings["toolChoice"];
 }): Promise<RESULT>;
-doGenerateTexts(prompt: OpenAIChatPrompt, options
+doGenerateTexts(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
 response: {
 object: "chat.completion";
 model: string;
@@ -118,7 +117,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
@@ -134,10 +133,97 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 totalTokens: number;
 };
 }>;
+restoreGeneratedTexts(rawResponse: unknown): {
+response: {
+object: "chat.completion";
+model: string;
+usage: {
+prompt_tokens: number;
+completion_tokens: number;
+total_tokens: number;
+};
+id: string;
+created: number;
+choices: {
+message: {
+role: "assistant";
+content: string | null;
+function_call?: {
+name: string;
+arguments: string;
+} | undefined;
+tool_calls?: {
+function: {
+name: string;
+arguments: string;
+};
+type: "function";
+id: string;
+}[] | undefined;
+};
+index?: number | undefined;
+logprobs?: any;
+finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+}[];
+system_fingerprint?: string | null | undefined;
+};
+textGenerationResults: {
+text: string;
+finishReason: TextGenerationFinishReason;
+}[];
+usage: {
+promptTokens: number;
+completionTokens: number;
+totalTokens: number;
+};
+};
+processTextGenerationResponse(response: OpenAIChatResponse): {
+response: {
+object: "chat.completion";
+model: string;
+usage: {
+prompt_tokens: number;
+completion_tokens: number;
+total_tokens: number;
+};
+id: string;
+created: number;
+choices: {
+message: {
+role: "assistant";
+content: string | null;
+function_call?: {
+name: string;
+arguments: string;
+} | undefined;
+tool_calls?: {
+function: {
+name: string;
+arguments: string;
+};
+type: "function";
+id: string;
+}[] | undefined;
+};
+index?: number | undefined;
+logprobs?: any;
+finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+}[];
+system_fingerprint?: string | null | undefined;
+};
+textGenerationResults: {
+text: string;
+finishReason: TextGenerationFinishReason;
+}[];
+usage: {
+promptTokens: number;
+completionTokens: number;
+totalTokens: number;
+};
+};
 private translateFinishReason;
-doStreamText(prompt: OpenAIChatPrompt, options
+doStreamText(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
 object: "chat.completion.chunk";
-model: string;
 id: string;
 created: number;
 choices: {
@@ -160,10 +246,11 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 index: number;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
+model?: string | undefined;
 system_fingerprint?: string | null | undefined;
 }>>>;
 extractTextDelta(delta: unknown): string | undefined;
-doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options
+doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
 response: {
 object: "chat.completion";
 model: string;
@@ -191,7 +278,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
@@ -207,7 +294,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 totalTokens: number;
 };
 }>;
-doGenerateToolCalls(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatPrompt, options
+doGenerateToolCalls(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
 response: {
 object: "chat.completion";
 model: string;
@@ -235,7 +322,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
@@ -334,7 +421,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 id: string;
 }[] | undefined;
 }>;
-index: z.ZodNumber
+index: z.ZodOptional<z.ZodNumber>;
 logprobs: z.ZodNullable<z.ZodAny>;
 finish_reason: z.ZodNullable<z.ZodOptional<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
 }, "strip", z.ZodTypeAny, {
@@ -354,7 +441,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }, {
@@ -374,7 +461,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }>, "many">;
@@ -422,7 +509,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
@@ -454,7 +541,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
@@ -579,11 +666,10 @@ declare const openaiChatChunkSchema: z.ZodObject<{
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }>, "many">;
 created: z.ZodNumber;
-model: z.ZodString
+model: z.ZodOptional<z.ZodString>;
 system_fingerprint: z.ZodNullable<z.ZodOptional<z.ZodString>>;
 }, "strip", z.ZodTypeAny, {
 object: "chat.completion.chunk";
-model: string;
 id: string;
 created: number;
 choices: {
@@ -606,10 +692,10 @@ declare const openaiChatChunkSchema: z.ZodObject<{
 index: number;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
+model?: string | undefined;
 system_fingerprint?: string | null | undefined;
 }, {
 object: "chat.completion.chunk";
-model: string;
 id: string;
 created: number;
 choices: {
@@ -632,6 +718,7 @@ declare const openaiChatChunkSchema: z.ZodObject<{
 index: number;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
+model?: string | undefined;
 system_fingerprint?: string | null | undefined;
 }>;
 export type OpenAIChatChunk = z.infer<typeof openaiChatChunkSchema>;
@@ -672,7 +759,7 @@ export declare const OpenAIChatResponseFormat: {
 id: string;
 }[] | undefined;
 };
-index
+index?: number | undefined;
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
@@ -688,7 +775,6 @@ export declare const OpenAIChatResponseFormat: {
 response: Response;
 }) => Promise<AsyncIterable<import("../../index.js").Delta<{
 object: "chat.completion.chunk";
-model: string;
 id: string;
 created: number;
 choices: {
@@ -711,6 +797,7 @@ export declare const OpenAIChatResponseFormat: {
 index: number;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
+model?: string | undefined;
 system_fingerprint?: string | null | undefined;
 }>>>;
 };
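The declaration diff above splits `callAPI` into a dedicated `FunctionCallOptions` argument plus a narrower API-options object; the removed lines (partially truncated in this view) show those fields were previously spread into a single object together with `responseFormat`. A hedged before/after sketch for code that called `callAPI` directly, with the model instance and options left as `declare` placeholders:

```ts
// Placeholders so the sketch stays self-contained; in real code these come
// from an OpenAI chat model instance and from the framework, respectively.
declare const chatModel: any;                // subclass of AbstractOpenAIChatModel
declare const callOptions: any;              // FunctionCallOptions for this call
declare const OpenAIChatResponseFormat: any; // response-format helpers from this module

async function demo() {
  // 0.113.x (old signature): per-call options spread into one object.
  // await chatModel.callAPI(messages, { ...callOptions, responseFormat: OpenAIChatResponseFormat.json });

  // 0.114.x (new signature): FunctionCallOptions travels as its own argument.
  return chatModel.callAPI(
    [{ role: "user" as const, content: "Hello!" }],
    callOptions,
    { responseFormat: OpenAIChatResponseFormat.json }
  );
}
```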
package/model-provider/openai/AbstractOpenAIChatModel.js

@@ -3,6 +3,7 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { parseJSON } from "../../core/schema/parseJSON.js";
+import { validateTypes } from "../../core/schema/validateTypes.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
 import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -16,12 +17,12 @@ export class AbstractOpenAIChatModel extends AbstractModel {
 constructor(settings) {
 super({ settings });
 }
-async callAPI(messages, options) {
+async callAPI(messages, callOptions, options) {
 const api = this.settings.api ?? new OpenAIApiConfiguration();
 const responseFormat = options.responseFormat;
-const abortSignal =
+const abortSignal = callOptions.run?.abortSignal;
 const user = this.settings.isUserIdForwardingEnabled
-?
+? callOptions.run?.userId
 : undefined;
 const openAIResponseFormat = this.settings.responseFormat;
 // function & tool calling:
@@ -42,7 +43,12 @@ export class AbstractOpenAIChatModel extends AbstractModel {
 }
 return postJsonToApi({
 url: api.assembleUrl("/chat/completions"),
-headers: api.headers
+headers: api.headers({
+functionType: callOptions.functionType,
+functionId: callOptions.functionId,
+run: callOptions.run,
+callId: callOptions.callId,
+}),
 body: {
 stream: responseFormat.stream,
 model: this.settings.model,
@@ -71,10 +77,17 @@ export class AbstractOpenAIChatModel extends AbstractModel {
 });
 }
 async doGenerateTexts(prompt, options) {
-
-...options,
+return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
 responseFormat: OpenAIChatResponseFormat.json,
-});
+}));
+}
+restoreGeneratedTexts(rawResponse) {
+return this.processTextGenerationResponse(validateTypes({
+structure: rawResponse,
+schema: zodSchema(openAIChatResponseSchema),
+}));
+}
+processTextGenerationResponse(response) {
 return {
 response,
 textGenerationResults: response.choices.map((choice) => ({
@@ -100,8 +113,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
 }
 }
 doStreamText(prompt, options) {
-return this.callAPI(prompt, {
-...options,
+return this.callAPI(prompt, options, {
 responseFormat: OpenAIChatResponseFormat.deltaIterable,
 });
 }
@@ -118,8 +130,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
 return firstChoice.delta.content ?? undefined;
 }
 async doGenerateToolCall(tool, prompt, options) {
-const response = await this.callAPI(prompt, {
-...options,
+const response = await this.callAPI(prompt, options, {
 responseFormat: OpenAIChatResponseFormat.json,
 toolChoice: {
 type: "function",
@@ -149,8 +160,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
 };
 }
 async doGenerateToolCalls(tools, prompt, options) {
-const response = await this.callAPI(prompt, {
-...options,
+const response = await this.callAPI(prompt, options, {
 responseFormat: OpenAIChatResponseFormat.json,
 toolChoice: "auto",
 tools: tools.map((tool) => ({
@@ -205,7 +215,7 @@ const openAIChatResponseSchema = z.object({
 }))
 .optional(),
 }),
-index: z.number(),
+index: z.number().optional(), // optional for OpenAI compatible models
 logprobs: z.nullable(z.any()),
 finish_reason: z
 .enum([
@@ -265,7 +275,7 @@ const openaiChatChunkSchema = z.object({
 index: z.number(),
 })),
 created: z.number(),
-model: z.string(),
+model: z.string().optional(), // optional for OpenAI compatible models
 system_fingerprint: z.string().optional().nullable(),
 });
 export const OpenAIChatResponseFormat = {
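In the implementation diff above, `headers: api.headers` (a property) becomes a call to `api.headers({ functionType, functionId, run, callId })`, so the API configuration now sees which function and call is making the request. Combined with the new `core/api/CustomHeaderProvider` module in the file list, this suggests configurations can compute headers per call; the sketch below assumes such a provider is simply a function from that call context to extra headers (the real `CustomHeaderProvider` type is not shown in this excerpt):

```ts
// Assumed call-context shape, mirroring the fields the models pass above.
type HeaderCallContext = {
  functionType: string;
  functionId?: string;
  callId: string;
  run?: { runId?: string };
};

// Hypothetical custom header provider: attach tracing headers per call.
const traceHeaders = (context: HeaderCallContext): Record<string, string> => ({
  "X-Function-Type": context.functionType,
  "X-Call-Id": context.callId,
  ...(context.run?.runId != null ? { "X-Run-Id": context.run.runId } : {}),
});

// Such a provider would presumably be passed to an ApiConfiguration that
// accepts custom header providers; the constructor option name is not shown here.
```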
package/model-provider/openai/AbstractOpenAICompletionModel.cjs

@@ -5,6 +5,7 @@ const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
+const validateTypes_js_1 = require("../../core/schema/validateTypes.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const createEventSourceResponseHandler_js_1 = require("../../util/streaming/createEventSourceResponseHandler.cjs");
 const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
@@ -18,12 +19,12 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
 constructor(settings) {
 super({ settings });
 }
-async callAPI(prompt, options) {
+async callAPI(prompt, callOptions, options) {
 const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
 const user = this.settings.isUserIdForwardingEnabled
-?
+? callOptions.run?.userId
 : undefined;
-const abortSignal =
+const abortSignal = callOptions.run?.abortSignal;
 const openaiResponseFormat = options.responseFormat;
 // empty arrays are not allowed for stop:
 const stopSequences = this.settings.stopSequences != null &&
@@ -36,7 +37,12 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
 throttle: api.throttle,
 call: async () => (0, postToApi_js_1.postJsonToApi)({
 url: api.assembleUrl("/completions"),
-headers: api.headers
+headers: api.headers({
+functionType: callOptions.functionType,
+functionId: callOptions.functionId,
+run: callOptions.run,
+callId: callOptions.callId,
+}),
 body: {
 stream: openaiResponseFormat.stream,
 model: this.settings.model,
@@ -63,10 +69,17 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
 });
 }
 async doGenerateTexts(prompt, options) {
-
-...options,
+return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
 responseFormat: exports.OpenAITextResponseFormat.json,
-});
+}));
+}
+restoreGeneratedTexts(rawResponse) {
+return this.processTextGenerationResponse((0, validateTypes_js_1.validateTypes)({
+structure: rawResponse,
+schema: (0, ZodSchema_js_1.zodSchema)(OpenAICompletionResponseSchema),
+}));
+}
+processTextGenerationResponse(response) {
 return {
 response,
 textGenerationResults: response.choices.map((choice) => {
@@ -95,8 +108,7 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
 }
 }
 doStreamText(prompt, options) {
-return this.callAPI(prompt, {
-...options,
+return this.callAPI(prompt, options, {
 responseFormat: exports.OpenAITextResponseFormat.deltaIterable,
 });
 }
package/model-provider/openai/AbstractOpenAICompletionModel.d.ts

@@ -1,5 +1,5 @@
 import { z } from "zod";
-import {
+import { FunctionCallOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
@@ -27,10 +27,10 @@ export interface AbstractOpenAICompletionModelSettings extends TextGenerationMod
 */
 export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends AbstractOpenAICompletionModelSettings> extends AbstractModel<SETTINGS> {
 constructor(settings: SETTINGS);
-callAPI<RESULT>(prompt: string, options: {
+callAPI<RESULT>(prompt: string, callOptions: FunctionCallOptions, options: {
 responseFormat: OpenAITextResponseFormatType<RESULT>;
-}
-doGenerateTexts(prompt: string, options
+}): Promise<RESULT>;
+doGenerateTexts(prompt: string, options: FunctionCallOptions): Promise<{
 response: {
 object: "text_completion";
 model: string;
@@ -59,8 +59,38 @@ export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends Abs
 totalTokens: number;
 };
 }>;
+restoreGeneratedTexts(rawResponse: unknown): {
+response: {
+object: "text_completion";
+model: string;
+usage: {
+prompt_tokens: number;
+completion_tokens: number;
+total_tokens: number;
+};
+id: string;
+created: number;
+choices: {
+text: string;
+index: number;
+finish_reason?: "length" | "stop" | "content_filter" | null | undefined;
+logprobs?: any;
+}[];
+system_fingerprint?: string | undefined;
+};
+textGenerationResults: {
+finishReason: TextGenerationFinishReason;
+text: string;
+}[];
+usage: {
+promptTokens: number;
+completionTokens: number;
+totalTokens: number;
+};
+};
+private processTextGenerationResponse;
 private translateFinishReason;
-doStreamText(prompt: string, options
+doStreamText(prompt: string, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
 object: "text_completion";
 model: string;
 id: string;
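The `restoreGeneratedTexts(rawResponse: unknown)` method declared above (and on the chat model earlier) maps an already-available raw provider response to text generation results without another HTTP call, re-validating it first. One plausible use, given the new `core/cache` modules in the file list, is replaying a stored response; a sketch under that assumption, with the model type narrowed to the single member used here:

```ts
// Narrowed structural type: only the member this sketch needs.
type RestorableTextModel = {
  restoreGeneratedTexts(rawResponse: unknown): {
    textGenerationResults: { text: string; finishReason: string }[];
  };
};

// Rebuild texts from a raw response captured earlier (for example one kept in
// the new MemoryCache; the cache API itself is not part of this diff excerpt).
function replayStoredResponse(model: RestorableTextModel, rawResponse: unknown): string {
  // Internally this re-validates rawResponse against the model's zod response
  // schema (validateTypes + zodSchema in the implementation diffs) and should
  // throw if the stored value no longer matches the expected shape.
  const { textGenerationResults } = model.restoreGeneratedTexts(rawResponse);
  return textGenerationResults.map((result) => result.text).join("\n");
}
```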
package/model-provider/openai/AbstractOpenAICompletionModel.js

@@ -2,6 +2,7 @@ import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { zodSchema } from "../../core/schema/ZodSchema.js";
+import { validateTypes } from "../../core/schema/validateTypes.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
 import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -15,12 +16,12 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
 constructor(settings) {
 super({ settings });
 }
-async callAPI(prompt, options) {
+async callAPI(prompt, callOptions, options) {
 const api = this.settings.api ?? new OpenAIApiConfiguration();
 const user = this.settings.isUserIdForwardingEnabled
-?
+? callOptions.run?.userId
 : undefined;
-const abortSignal =
+const abortSignal = callOptions.run?.abortSignal;
 const openaiResponseFormat = options.responseFormat;
 // empty arrays are not allowed for stop:
 const stopSequences = this.settings.stopSequences != null &&
@@ -33,7 +34,12 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
 throttle: api.throttle,
 call: async () => postJsonToApi({
 url: api.assembleUrl("/completions"),
-headers: api.headers
+headers: api.headers({
+functionType: callOptions.functionType,
+functionId: callOptions.functionId,
+run: callOptions.run,
+callId: callOptions.callId,
+}),
 body: {
 stream: openaiResponseFormat.stream,
 model: this.settings.model,
@@ -60,10 +66,17 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
 });
 }
 async doGenerateTexts(prompt, options) {
-
-...options,
+return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
 responseFormat: OpenAITextResponseFormat.json,
-});
+}));
+}
+restoreGeneratedTexts(rawResponse) {
+return this.processTextGenerationResponse(validateTypes({
+structure: rawResponse,
+schema: zodSchema(OpenAICompletionResponseSchema),
+}));
+}
+processTextGenerationResponse(response) {
 return {
 response,
 textGenerationResults: response.choices.map((choice) => {
@@ -92,8 +105,7 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
 }
 }
 doStreamText(prompt, options) {
-return this.callAPI(prompt, {
-...options,
+return this.callAPI(prompt, options, {
 responseFormat: OpenAITextResponseFormat.deltaIterable,
 });
 }
package/model-provider/openai/AzureOpenAIApiConfiguration.cjs

@@ -31,7 +31,7 @@ class AzureOpenAIApiConfiguration extends AbstractApiConfiguration_js_1.Abstract
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "fixedHeaderValue", {
 enumerable: true,
 configurable: true,
 writable: true,
@@ -40,7 +40,7 @@ class AzureOpenAIApiConfiguration extends AbstractApiConfiguration_js_1.Abstract
 this.resourceName = resourceName;
 this.deploymentId = deploymentId;
 this.apiVersion = apiVersion;
-this.
+this.fixedHeaderValue = {
 "api-key": (0, loadApiKey_js_1.loadApiKey)({
 apiKey,
 environmentVariableName: "AZURE_OPENAI_API_KEY",
@@ -51,5 +51,8 @@ class AzureOpenAIApiConfiguration extends AbstractApiConfiguration_js_1.Abstract
 assembleUrl(path) {
 return `https://${this.resourceName}.openai.azure.com/openai/deployments/${this.deploymentId}${path}?api-version=${this.apiVersion}`;
 }
+fixedHeaders() {
+return this.fixedHeaderValue;
+}
 }
 exports.AzureOpenAIApiConfiguration = AzureOpenAIApiConfiguration;
package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts

@@ -20,7 +20,8 @@ export declare class AzureOpenAIApiConfiguration extends AbstractApiConfiguratio
 readonly resourceName: string;
 readonly deploymentId: string;
 readonly apiVersion: string;
-readonly
+readonly fixedHeaderValue: Record<string, string>;
 constructor({ resourceName, deploymentId, apiVersion, apiKey, retry, throttle, }: AzureOpenAIApiConfigurationOptions);
 assembleUrl(path: string): string;
+fixedHeaders(): Record<string, string>;
 }
package/model-provider/openai/AzureOpenAIApiConfiguration.js

@@ -28,7 +28,7 @@ export class AzureOpenAIApiConfiguration extends AbstractApiConfiguration {
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "fixedHeaderValue", {
 enumerable: true,
 configurable: true,
 writable: true,
@@ -37,7 +37,7 @@ export class AzureOpenAIApiConfiguration extends AbstractApiConfiguration {
 this.resourceName = resourceName;
 this.deploymentId = deploymentId;
 this.apiVersion = apiVersion;
-this.
+this.fixedHeaderValue = {
 "api-key": loadApiKey({
 apiKey,
 environmentVariableName: "AZURE_OPENAI_API_KEY",
@@ -48,4 +48,7 @@ export class AzureOpenAIApiConfiguration extends AbstractApiConfiguration {
 assembleUrl(path) {
 return `https://${this.resourceName}.openai.azure.com/openai/deployments/${this.deploymentId}${path}?api-version=${this.apiVersion}`;
 }
+fixedHeaders() {
+return this.fixedHeaderValue;
+}
 }
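The Azure configuration now stores its static headers in `fixedHeaderValue` and exposes them through a `fixedHeaders()` method, which lines up with the `AbstractApiConfiguration`/`ApiConfiguration` and `CustomHeaderProvider` changes in the file list: a base `headers(callOptions)` presumably merges fixed headers with per-call ones. That base-class wiring is not part of this excerpt, so the following is only a sketch of the pattern under that assumption; every name except `fixedHeaders()` and `headers(...)` is invented for illustration:

```ts
// Sketch only: not the actual AbstractApiConfiguration from modelfusion 0.114.
type CallContext = {
  functionType: string;
  functionId?: string;
  callId: string;
  run?: unknown;
};
type HeaderProvider = (context: CallContext) => Record<string, string>;

abstract class ApiConfigurationSketch {
  constructor(private readonly customHeaderProviders: HeaderProvider[] = []) {}

  // Subclasses (like the Azure configuration above) return their static headers.
  abstract fixedHeaders(): Record<string, string>;

  // Per-call headers: start from the fixed headers, then let each provider add more.
  headers(context: CallContext): Record<string, string> {
    return this.customHeaderProviders.reduce(
      (merged, provider) => ({ ...merged, ...provider(context) }),
      { ...this.fixedHeaders() }
    );
  }
}
```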