modelfusion 0.110.0 → 0.111.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/core/api/ApiFacade.cjs +20 -0
- package/core/api/ApiFacade.d.ts +4 -0
- package/core/api/ApiFacade.js +4 -0
- package/core/api/BaseUrlApiConfiguration.cjs +47 -5
- package/core/api/BaseUrlApiConfiguration.d.ts +23 -6
- package/core/api/BaseUrlApiConfiguration.js +45 -4
- package/core/api/BaseUrlApiConfiguration.test.cjs +11 -0
- package/core/api/BaseUrlApiConfiguration.test.d.ts +1 -0
- package/core/api/BaseUrlApiConfiguration.test.js +9 -0
- package/core/api/callWithRetryAndThrottle.cjs +3 -3
- package/core/api/callWithRetryAndThrottle.js +3 -3
- package/core/api/index.cjs +15 -2
- package/core/api/index.d.ts +2 -2
- package/core/api/index.js +2 -2
- package/core/api/postToApi.cjs +28 -5
- package/core/api/postToApi.d.ts +5 -4
- package/core/api/postToApi.js +26 -4
- package/core/api/throttleOff.cjs +8 -0
- package/core/api/throttleOff.d.ts +5 -0
- package/core/api/throttleOff.js +4 -0
- package/internal/index.cjs +2 -8
- package/internal/index.d.ts +1 -1
- package/internal/index.js +1 -1
- package/model-function/generate-structure/generateStructure.d.ts +1 -1
- package/model-provider/anthropic/AnthropicApiConfiguration.cjs +14 -6
- package/model-provider/anthropic/AnthropicApiConfiguration.d.ts +7 -8
- package/model-provider/anthropic/AnthropicApiConfiguration.js +15 -7
- package/model-provider/anthropic/AnthropicError.cjs +7 -27
- package/model-provider/anthropic/AnthropicError.d.ts +21 -16
- package/model-provider/anthropic/AnthropicError.js +7 -25
- package/model-provider/anthropic/AnthropicFacade.cjs +10 -1
- package/model-provider/anthropic/AnthropicFacade.d.ts +9 -0
- package/model-provider/anthropic/AnthropicFacade.js +8 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +22 -24
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +23 -25
- package/model-provider/anthropic/index.cjs +1 -4
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +0 -1
- package/model-provider/automatic1111/Automatic1111ApiConfiguration.cjs +12 -4
- package/model-provider/automatic1111/Automatic1111ApiConfiguration.d.ts +3 -3
- package/model-provider/automatic1111/Automatic1111ApiConfiguration.js +12 -4
- package/model-provider/automatic1111/Automatic1111Error.cjs +3 -3
- package/model-provider/automatic1111/Automatic1111Error.d.ts +13 -3
- package/model-provider/automatic1111/Automatic1111Error.js +4 -4
- package/model-provider/automatic1111/Automatic1111Facade.cjs +9 -9
- package/model-provider/automatic1111/Automatic1111Facade.d.ts +6 -6
- package/model-provider/automatic1111/Automatic1111Facade.js +7 -7
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +11 -7
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +11 -7
- package/model-provider/cohere/CohereApiConfiguration.cjs +14 -6
- package/model-provider/cohere/CohereApiConfiguration.d.ts +7 -8
- package/model-provider/cohere/CohereApiConfiguration.js +15 -7
- package/model-provider/cohere/CohereError.cjs +8 -43
- package/model-provider/cohere/CohereError.d.ts +9 -16
- package/model-provider/cohere/CohereError.js +8 -41
- package/model-provider/cohere/CohereFacade.cjs +12 -3
- package/model-provider/cohere/CohereFacade.d.ts +11 -2
- package/model-provider/cohere/CohereFacade.js +10 -2
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +18 -22
- package/model-provider/cohere/CohereTextEmbeddingModel.js +18 -22
- package/model-provider/cohere/CohereTextGenerationModel.cjs +31 -39
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +110 -8
- package/model-provider/cohere/CohereTextGenerationModel.js +31 -39
- package/model-provider/cohere/CohereTokenizer.cjs +32 -41
- package/model-provider/cohere/CohereTokenizer.d.ts +2 -2
- package/model-provider/cohere/CohereTokenizer.js +32 -41
- package/model-provider/cohere/index.cjs +1 -3
- package/model-provider/cohere/index.d.ts +1 -1
- package/model-provider/cohere/index.js +0 -1
- package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +14 -6
- package/model-provider/elevenlabs/ElevenLabsApiConfiguration.d.ts +7 -8
- package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +15 -7
- package/model-provider/elevenlabs/ElevenLabsFacade.cjs +10 -1
- package/model-provider/elevenlabs/ElevenLabsFacade.d.ts +9 -0
- package/model-provider/elevenlabs/ElevenLabsFacade.js +8 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +42 -53
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +44 -55
- package/model-provider/huggingface/HuggingFaceApiConfiguration.cjs +14 -6
- package/model-provider/huggingface/HuggingFaceApiConfiguration.d.ts +7 -8
- package/model-provider/huggingface/HuggingFaceApiConfiguration.js +15 -7
- package/model-provider/huggingface/HuggingFaceError.cjs +7 -29
- package/model-provider/huggingface/HuggingFaceError.d.ts +9 -16
- package/model-provider/huggingface/HuggingFaceError.js +7 -27
- package/model-provider/huggingface/HuggingFaceFacade.cjs +10 -1
- package/model-provider/huggingface/HuggingFaceFacade.d.ts +9 -0
- package/model-provider/huggingface/HuggingFaceFacade.js +8 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +17 -27
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +17 -27
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +22 -23
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +22 -23
- package/model-provider/huggingface/index.cjs +1 -3
- package/model-provider/huggingface/index.d.ts +1 -1
- package/model-provider/huggingface/index.js +0 -1
- package/model-provider/llamacpp/LlamaCppApiConfiguration.cjs +13 -6
- package/model-provider/llamacpp/LlamaCppApiConfiguration.d.ts +7 -9
- package/model-provider/llamacpp/LlamaCppApiConfiguration.js +14 -7
- package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +4 -4
- package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +173 -5
- package/model-provider/llamacpp/LlamaCppCompletionModel.js +4 -4
- package/model-provider/llamacpp/LlamaCppError.cjs +7 -27
- package/model-provider/llamacpp/LlamaCppError.d.ts +9 -16
- package/model-provider/llamacpp/LlamaCppError.js +7 -25
- package/model-provider/llamacpp/LlamaCppFacade.cjs +10 -2
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +8 -1
- package/model-provider/llamacpp/LlamaCppFacade.js +8 -1
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +10 -14
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +10 -14
- package/model-provider/llamacpp/LlamaCppTokenizer.cjs +14 -18
- package/model-provider/llamacpp/LlamaCppTokenizer.js +14 -18
- package/model-provider/llamacpp/index.cjs +1 -3
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +0 -1
- package/model-provider/lmnt/LmntApiConfiguration.cjs +14 -6
- package/model-provider/lmnt/LmntApiConfiguration.d.ts +7 -8
- package/model-provider/lmnt/LmntApiConfiguration.js +15 -7
- package/model-provider/lmnt/LmntFacade.cjs +11 -2
- package/model-provider/lmnt/LmntFacade.d.ts +10 -1
- package/model-provider/lmnt/LmntFacade.js +9 -1
- package/model-provider/lmnt/LmntSpeechModel.cjs +53 -41
- package/model-provider/lmnt/LmntSpeechModel.d.ts +51 -3
- package/model-provider/lmnt/LmntSpeechModel.js +54 -42
- package/model-provider/mistral/MistralApiConfiguration.cjs +14 -6
- package/model-provider/mistral/MistralApiConfiguration.d.ts +9 -11
- package/model-provider/mistral/MistralApiConfiguration.js +15 -7
- package/model-provider/mistral/MistralChatModel.cjs +4 -4
- package/model-provider/mistral/MistralChatModel.d.ts +48 -3
- package/model-provider/mistral/MistralChatModel.js +5 -5
- package/model-provider/mistral/MistralError.cjs +3 -3
- package/model-provider/mistral/MistralError.d.ts +15 -3
- package/model-provider/mistral/MistralError.js +4 -4
- package/model-provider/mistral/MistralFacade.cjs +5 -1
- package/model-provider/mistral/MistralFacade.d.ts +10 -3
- package/model-provider/mistral/MistralFacade.js +6 -2
- package/model-provider/mistral/MistralTextEmbeddingModel.cjs +2 -1
- package/model-provider/mistral/MistralTextEmbeddingModel.js +2 -1
- package/model-provider/ollama/OllamaApiConfiguration.cjs +13 -6
- package/model-provider/ollama/OllamaApiConfiguration.d.ts +7 -10
- package/model-provider/ollama/OllamaApiConfiguration.js +14 -7
- package/model-provider/ollama/OllamaChatModel.cjs +4 -4
- package/model-provider/ollama/OllamaChatModel.d.ts +46 -5
- package/model-provider/ollama/OllamaChatModel.js +5 -5
- package/model-provider/ollama/OllamaCompletionModel.cjs +4 -4
- package/model-provider/ollama/OllamaCompletionModel.d.ts +40 -5
- package/model-provider/ollama/OllamaCompletionModel.js +5 -5
- package/model-provider/ollama/OllamaError.cjs +3 -3
- package/model-provider/ollama/OllamaError.d.ts +7 -3
- package/model-provider/ollama/OllamaError.js +4 -4
- package/model-provider/ollama/OllamaFacade.cjs +6 -2
- package/model-provider/ollama/OllamaFacade.d.ts +8 -3
- package/model-provider/ollama/OllamaFacade.js +6 -2
- package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +15 -16
- package/model-provider/ollama/OllamaTextEmbeddingModel.js +15 -16
- package/model-provider/openai/AbstractOpenAIChatModel.cjs +4 -4
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +148 -2
- package/model-provider/openai/AbstractOpenAIChatModel.js +4 -4
- package/model-provider/openai/AbstractOpenAICompletionModel.cjs +30 -32
- package/model-provider/openai/AbstractOpenAICompletionModel.js +30 -32
- package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +9 -8
- package/model-provider/openai/OpenAIApiConfiguration.cjs +14 -6
- package/model-provider/openai/OpenAIApiConfiguration.d.ts +7 -8
- package/model-provider/openai/OpenAIApiConfiguration.js +15 -7
- package/model-provider/openai/OpenAIError.cjs +8 -8
- package/model-provider/openai/OpenAIError.d.ts +27 -3
- package/model-provider/openai/OpenAIError.js +9 -9
- package/model-provider/openai/OpenAIFacade.cjs +23 -2
- package/model-provider/openai/OpenAIFacade.d.ts +20 -2
- package/model-provider/openai/OpenAIFacade.js +20 -1
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +20 -21
- package/model-provider/openai/OpenAIImageGenerationModel.js +20 -21
- package/model-provider/openai/OpenAISpeechModel.cjs +17 -22
- package/model-provider/openai/OpenAISpeechModel.js +17 -22
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +18 -23
- package/model-provider/openai/OpenAITextEmbeddingModel.js +18 -23
- package/model-provider/openai/OpenAITranscriptionModel.cjs +42 -48
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +14 -10
- package/model-provider/openai/OpenAITranscriptionModel.js +42 -48
- package/model-provider/openai-compatible/FireworksAIApiConfiguration.cjs +11 -7
- package/model-provider/openai-compatible/FireworksAIApiConfiguration.d.ts +4 -9
- package/model-provider/openai-compatible/FireworksAIApiConfiguration.js +12 -8
- package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +25 -1
- package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +23 -0
- package/model-provider/openai-compatible/OpenAICompatibleFacade.js +22 -0
- package/model-provider/openai-compatible/TogetherAIApiConfiguration.cjs +11 -7
- package/model-provider/openai-compatible/TogetherAIApiConfiguration.d.ts +4 -9
- package/model-provider/openai-compatible/TogetherAIApiConfiguration.js +12 -8
- package/model-provider/stability/StabilityApiConfiguration.cjs +13 -12
- package/model-provider/stability/StabilityApiConfiguration.d.ts +4 -4
- package/model-provider/stability/StabilityApiConfiguration.js +13 -12
- package/model-provider/stability/StabilityError.cjs +3 -3
- package/model-provider/stability/StabilityError.d.ts +7 -3
- package/model-provider/stability/StabilityError.js +4 -4
- package/model-provider/stability/StabilityFacade.cjs +9 -9
- package/model-provider/stability/StabilityFacade.d.ts +8 -8
- package/model-provider/stability/StabilityFacade.js +7 -7
- package/model-provider/stability/StabilityImageGenerationModel.cjs +2 -1
- package/model-provider/stability/StabilityImageGenerationModel.js +2 -1
- package/model-provider/whispercpp/WhisperCppApiConfiguration.cjs +13 -6
- package/model-provider/whispercpp/WhisperCppApiConfiguration.d.ts +7 -10
- package/model-provider/whispercpp/WhisperCppApiConfiguration.js +14 -7
- package/model-provider/whispercpp/WhisperCppFacade.cjs +9 -5
- package/model-provider/whispercpp/WhisperCppFacade.d.ts +7 -2
- package/model-provider/whispercpp/WhisperCppFacade.js +8 -4
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +5 -2
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +6 -3
- package/package.json +1 -1
- package/tool/WebSearchTool.cjs +2 -2
- package/tool/WebSearchTool.d.ts +1 -2
- package/tool/WebSearchTool.js +3 -3
- package/vector-index/memory/MemoryVectorIndex.cjs +2 -2
- package/vector-index/memory/MemoryVectorIndex.js +3 -3
- package/core/api/BaseUrlPartsApiConfiguration.cjs +0 -53
- package/core/api/BaseUrlPartsApiConfiguration.d.ts +0 -26
- package/core/api/BaseUrlPartsApiConfiguration.js +0 -49
- package/core/api/throttleUnlimitedConcurrency.cjs +0 -8
- package/core/api/throttleUnlimitedConcurrency.d.ts +0 -5
- package/core/api/throttleUnlimitedConcurrency.js +0 -4
- package/model-provider/elevenlabs/ElevenLabsError.cjs +0 -30
- package/model-provider/elevenlabs/ElevenLabsError.d.ts +0 -3
- package/model-provider/elevenlabs/ElevenLabsError.js +0 -26
- package/model-provider/lmnt/LmntError.cjs +0 -30
- package/model-provider/lmnt/LmntError.d.ts +0 -3
- package/model-provider/lmnt/LmntError.js +0 -26
package/model-provider/openai/OpenAIError.js
(Deleted lines whose content the source diff view cuts off are marked with …)

@@ -1,19 +1,19 @@
 import { z } from "zod";
 import { createJsonErrorResponseHandler } from "../../core/api/postToApi.js";
-import { …
-const openAIErrorDataSchema = …
+import { zodSchema } from "../../core/schema/ZodSchema.js";
+const openAIErrorDataSchema = z.object({
     error: z.object({
         message: z.string(),
         type: z.string(),
         param: z.any().nullable(),
         code: z.string().nullable(),
     }),
-})
+});
 export const failedOpenAICallResponseHandler = createJsonErrorResponseHandler({
-    errorSchema: openAIErrorDataSchema,
-    errorToMessage: ( …
-    isRetryable: ( …
-…
-…
-…
+    errorSchema: zodSchema(openAIErrorDataSchema),
+    errorToMessage: (data) => data.error.message,
+    isRetryable: (response, error) => response.status >= 500 ||
+        (response.status === 429 &&
+            // insufficient_quota is also reported as a 429, but it's not retryable:
+            error?.error.type !== "insufficient_quota"),
 });
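This `zodSchema(...)` wrapping recurs throughout the release: error and response handlers now receive a ModelFusion Schema object instead of a bare Zod schema. A minimal sketch of the wrapper, assuming `zodSchema` is also re-exported from the package root:

```ts
import { z } from "zod";
import { zodSchema } from "modelfusion"; // assumption: re-export of core/schema/ZodSchema

// Wraps a Zod schema in ModelFusion's Schema interface. Handlers such as
// createJsonErrorResponseHandler / createJsonResponseHandler now take this
// wrapper (see zodSchema(openAIErrorDataSchema) above) rather than a raw Zod schema.
export const errorSchema = zodSchema(
  z.object({
    error: z.object({
      message: z.string(),
      type: z.string(),
    }),
  })
);
```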
package/model-provider/openai/OpenAIFacade.cjs

@@ -1,13 +1,34 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatMessage = exports.Tokenizer = exports.ImageGenerator = exports.Transcriber = exports.SpeechGenerator = exports.TextEmbedder = exports.ChatTextGenerator = exports.CompletionTextGenerator = void 0;
+exports.ChatMessage = exports.Tokenizer = exports.ImageGenerator = exports.Transcriber = exports.SpeechGenerator = exports.TextEmbedder = exports.ChatTextGenerator = exports.CompletionTextGenerator = exports.AzureApi = exports.Api = void 0;
+const AzureOpenAIApiConfiguration_js_1 = require("./AzureOpenAIApiConfiguration.cjs");
+const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
+const OpenAIChatModel_js_1 = require("./OpenAIChatModel.cjs");
 const OpenAICompletionModel_js_1 = require("./OpenAICompletionModel.cjs");
 const OpenAIImageGenerationModel_js_1 = require("./OpenAIImageGenerationModel.cjs");
 const OpenAISpeechModel_js_1 = require("./OpenAISpeechModel.cjs");
 const OpenAITextEmbeddingModel_js_1 = require("./OpenAITextEmbeddingModel.cjs");
 const OpenAITranscriptionModel_js_1 = require("./OpenAITranscriptionModel.cjs");
 const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
-…
+/**
+ * Creates an API configuration for the OpenAI API.
+ * It calls the API at https://api.openai.com/v1 and uses the `OPENAI_API_KEY` env variable by default.
+ */
+function Api(settings) {
+    return new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(settings);
+}
+exports.Api = Api;
+/**
+ * Configuration for the Azure OpenAI API. This class is responsible for constructing URLs specific to the Azure OpenAI deployment.
+ * It creates URLs of the form
+ * `https://[resourceName].openai.azure.com/openai/deployments/[deploymentId]/[path]?api-version=[apiVersion]`
+ *
+ * @see https://learn.microsoft.com/en-us/azure/ai-services/openai/reference
+ */
+function AzureApi(settings) {
+    return new AzureOpenAIApiConfiguration_js_1.AzureOpenAIApiConfiguration(settings);
+}
+exports.AzureApi = AzureApi;
 /**
  * Create a text generation model that calls the OpenAI text completion API.
  *
package/model-provider/openai/OpenAIFacade.d.ts

@@ -1,10 +1,28 @@
+import { PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseUrlApiConfiguration.js";
+import { AzureOpenAIApiConfiguration, AzureOpenAIApiConfigurationOptions } from "./AzureOpenAIApiConfiguration.js";
+import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
+import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel.js";
 import { OpenAICompletionModel, OpenAICompletionModelSettings } from "./OpenAICompletionModel.js";
 import { OpenAIImageGenerationModel, OpenAIImageGenerationSettings } from "./OpenAIImageGenerationModel.js";
 import { OpenAISpeechModel, OpenAISpeechModelSettings } from "./OpenAISpeechModel.js";
 import { OpenAITextEmbeddingModel, OpenAITextEmbeddingModelSettings } from "./OpenAITextEmbeddingModel.js";
 import { OpenAITranscriptionModel, OpenAITranscriptionModelSettings } from "./OpenAITranscriptionModel.js";
 import { TikTokenTokenizer, TikTokenTokenizerSettings } from "./TikTokenTokenizer.js";
-…
+/**
+ * Creates an API configuration for the OpenAI API.
+ * It calls the API at https://api.openai.com/v1 and uses the `OPENAI_API_KEY` env variable by default.
+ */
+export declare function Api(settings: PartialBaseUrlPartsApiConfigurationOptions & {
+    apiKey?: string;
+}): OpenAIApiConfiguration;
+/**
+ * Configuration for the Azure OpenAI API. This class is responsible for constructing URLs specific to the Azure OpenAI deployment.
+ * It creates URLs of the form
+ * `https://[resourceName].openai.azure.com/openai/deployments/[deploymentId]/[path]?api-version=[apiVersion]`
+ *
+ * @see https://learn.microsoft.com/en-us/azure/ai-services/openai/reference
+ */
+export declare function AzureApi(settings: AzureOpenAIApiConfigurationOptions): AzureOpenAIApiConfiguration;
 /**
  * Create a text generation model that calls the OpenAI text completion API.
  *
@@ -124,5 +142,5 @@ export declare function ImageGenerator(settings: OpenAIImageGenerationSettings):
  * @returns A new instance of {@link TikTokenTokenizer}.
  */
 export declare function Tokenizer(settings: TikTokenTokenizerSettings): TikTokenTokenizer;
-export { OpenAIChatMessage as ChatMessage } from "./OpenAIChatMessage.js";
 export { OpenAIChatPrompt as ChatPrompt } from "./AbstractOpenAIChatModel.js";
+export { OpenAIChatMessage as ChatMessage } from "./OpenAIChatMessage.js";
package/model-provider/openai/OpenAIFacade.js

@@ -1,10 +1,29 @@
+import { AzureOpenAIApiConfiguration, } from "./AzureOpenAIApiConfiguration.js";
+import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
+import { OpenAIChatModel } from "./OpenAIChatModel.js";
 import { OpenAICompletionModel, } from "./OpenAICompletionModel.js";
 import { OpenAIImageGenerationModel, } from "./OpenAIImageGenerationModel.js";
 import { OpenAISpeechModel, } from "./OpenAISpeechModel.js";
 import { OpenAITextEmbeddingModel, } from "./OpenAITextEmbeddingModel.js";
 import { OpenAITranscriptionModel, } from "./OpenAITranscriptionModel.js";
 import { TikTokenTokenizer, } from "./TikTokenTokenizer.js";
-…
+/**
+ * Creates an API configuration for the OpenAI API.
+ * It calls the API at https://api.openai.com/v1 and uses the `OPENAI_API_KEY` env variable by default.
+ */
+export function Api(settings) {
+    return new OpenAIApiConfiguration(settings);
+}
+/**
+ * Configuration for the Azure OpenAI API. This class is responsible for constructing URLs specific to the Azure OpenAI deployment.
+ * It creates URLs of the form
+ * `https://[resourceName].openai.azure.com/openai/deployments/[deploymentId]/[path]?api-version=[apiVersion]`
+ *
+ * @see https://learn.microsoft.com/en-us/azure/ai-services/openai/reference
+ */
+export function AzureApi(settings) {
+    return new AzureOpenAIApiConfiguration(settings);
+}
 /**
  * Create a text generation model that calls the OpenAI text completion API.
  *
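The facade now exposes `Api()` and `AzureApi()`, so an API configuration can be built without importing the configuration classes directly. A usage sketch, assuming the facade is exported as `openai` from the package root and that the Azure option names follow the URL template in the JSDoc above:

```ts
import { openai } from "modelfusion";

// Standard OpenAI configuration: targets https://api.openai.com/v1 and falls
// back to the OPENAI_API_KEY env variable when no key is passed (per the JSDoc).
const api = openai.Api({ apiKey: process.env.MY_OPENAI_KEY });

// Azure OpenAI configuration: option names are assumptions derived from the
// URL template `https://[resourceName].openai.azure.com/openai/deployments/[deploymentId]/...`.
const azureApi = openai.AzureApi({
  apiKey: process.env.AZURE_OPENAI_API_KEY,
  resourceName: "my-resource",
  deploymentId: "my-deployment",
  apiVersion: "2023-05-15",
});

// Either configuration is passed to a model through its `api` setting, which is
// what the `this.settings.api ?? new OpenAIApiConfiguration()` lines below read.
const completionModel = openai.CompletionTextGenerator({
  api, // or: api: azureApi
  model: "gpt-3.5-turbo-instruct",
});
```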
package/model-provider/openai/OpenAIImageGenerationModel.cjs

@@ -4,6 +4,7 @@ exports.OpenAIImageGenerationResponseFormat = exports.OpenAIImageGenerationModel
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const PromptTemplateImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptTemplateImageGenerationModel.cjs");
 const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
@@ -89,28 +90,26 @@ class OpenAIImageGenerationModel extends AbstractModel_js_1.AbstractModel {
     }
     async callAPI(prompt, options) {
         const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
-        const abortSignal = options …
-        const userId = options …
-        const responseFormat = options …
+        const abortSignal = options.run?.abortSignal;
+        const userId = options.run?.userId;
+        const responseFormat = options.responseFormat;
         return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
             retry: api.retry,
             throttle: api.throttle,
-            call: async () => {
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-                });
-            },
+            call: async () => (0, postToApi_js_1.postJsonToApi)({
+                url: api.assembleUrl("/images/generations"),
+                headers: api.headers,
+                body: {
+                    prompt,
+                    n: this.settings.numberOfGenerations,
+                    size: this.settings.size,
+                    response_format: responseFormat.type,
+                    user: this.settings.isUserIdForwardingEnabled ? userId : undefined,
+                },
+                failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+                successfulResponseHandler: responseFormat.handler,
+                abortSignal,
+            }),
         });
     }
     get settingsForEvent() {
@@ -158,10 +157,10 @@ const openAIImageGenerationBase64JsonSchema = zod_1.z.object({
 exports.OpenAIImageGenerationResponseFormat = {
     url: {
         type: "url",
-        handler: (0, postToApi_js_1.createJsonResponseHandler)(openAIImageGenerationUrlSchema),
+        handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAIImageGenerationUrlSchema)),
     },
     base64Json: {
         type: "b64_json",
-        handler: (0, postToApi_js_1.createJsonResponseHandler)(openAIImageGenerationBase64JsonSchema),
+        handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAIImageGenerationBase64JsonSchema)),
     },
 };
package/model-provider/openai/OpenAIImageGenerationModel.js

@@ -1,6 +1,7 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
 import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -85,28 +86,26 @@ export class OpenAIImageGenerationModel extends AbstractModel {
     }
     async callAPI(prompt, options) {
         const api = this.settings.api ?? new OpenAIApiConfiguration();
-        const abortSignal = options …
-        const userId = options …
-        const responseFormat = options …
+        const abortSignal = options.run?.abortSignal;
+        const userId = options.run?.userId;
+        const responseFormat = options.responseFormat;
         return callWithRetryAndThrottle({
             retry: api.retry,
             throttle: api.throttle,
-            call: async () => {
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-                });
-            },
+            call: async () => postJsonToApi({
+                url: api.assembleUrl("/images/generations"),
+                headers: api.headers,
+                body: {
+                    prompt,
+                    n: this.settings.numberOfGenerations,
+                    size: this.settings.size,
+                    response_format: responseFormat.type,
+                    user: this.settings.isUserIdForwardingEnabled ? userId : undefined,
+                },
+                failedResponseHandler: failedOpenAICallResponseHandler,
+                successfulResponseHandler: responseFormat.handler,
+                abortSignal,
+            }),
         });
     }
     get settingsForEvent() {
@@ -153,10 +152,10 @@ const openAIImageGenerationBase64JsonSchema = z.object({
 export const OpenAIImageGenerationResponseFormat = {
     url: {
         type: "url",
-        handler: createJsonResponseHandler(openAIImageGenerationUrlSchema),
+        handler: createJsonResponseHandler(zodSchema(openAIImageGenerationUrlSchema)),
     },
     base64Json: {
         type: "b64_json",
-        handler: createJsonResponseHandler(openAIImageGenerationBase64JsonSchema),
+        handler: createJsonResponseHandler(zodSchema(openAIImageGenerationBase64JsonSchema)),
     },
 };
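Both compiled variants make the same two changes: the request is built inline inside `callWithRetryAndThrottle`, and the `url`/`b64_json` response handlers wrap their Zod schemas with `zodSchema(...)`. The values referenced in the request body are regular model settings; a hedged sketch with illustrative values (other settings, if any, omitted):

```ts
import { openai } from "modelfusion";

// numberOfGenerations maps to `n` and size to `size` in the request body above;
// the response format (url vs. b64_json) is selected per call.
const imageModel = openai.ImageGenerator({
  size: "1024x1024",
  numberOfGenerations: 1,
});
```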
package/model-provider/openai/OpenAISpeechModel.cjs

@@ -46,13 +46,24 @@ class OpenAISpeechModel extends AbstractModel_js_1.AbstractModel {
         return this.settings.model;
     }
     async callAPI(text, options) {
+        const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
+        const abortSignal = options?.run?.abortSignal;
         return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
-            retry: …
-            throttle: …
-            call: async () => …
-…
-…
-…
+            retry: api.retry,
+            throttle: api.throttle,
+            call: async () => (0, postToApi_js_1.postJsonToApi)({
+                url: api.assembleUrl(`/audio/speech`),
+                headers: api.headers,
+                body: {
+                    input: text,
+                    voice: this.settings.voice,
+                    speed: this.settings.speed,
+                    model: this.settings.model,
+                    response_format: this.settings.responseFormat,
+                },
+                failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+                successfulResponseHandler: (0, postToApi_js_1.createAudioMpegResponseHandler)(),
+                abortSignal,
             }),
         });
     }
@@ -75,19 +86,3 @@ class OpenAISpeechModel extends AbstractModel_js_1.AbstractModel {
     }
 }
 exports.OpenAISpeechModel = OpenAISpeechModel;
-async function callOpenAITextToSpeechAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, text, voice, model, speed, responseFormat, }) {
-    return (0, postToApi_js_1.postJsonToApi)({
-        url: api.assembleUrl(`/audio/speech`),
-        headers: api.headers,
-        body: {
-            input: text,
-            voice,
-            speed,
-            model,
-            response_format: responseFormat,
-        },
-        failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
-        successfulResponseHandler: (0, postToApi_js_1.createAudioMpegResponseHandler)(),
-        abortSignal,
-    });
-}
package/model-provider/openai/OpenAISpeechModel.js

@@ -42,13 +42,24 @@ export class OpenAISpeechModel extends AbstractModel {
         return this.settings.model;
     }
     async callAPI(text, options) {
+        const api = this.settings.api ?? new OpenAIApiConfiguration();
+        const abortSignal = options?.run?.abortSignal;
         return callWithRetryAndThrottle({
-            retry: …
-            throttle: …
-            call: async () => …
-…
-…
-…
+            retry: api.retry,
+            throttle: api.throttle,
+            call: async () => postJsonToApi({
+                url: api.assembleUrl(`/audio/speech`),
+                headers: api.headers,
+                body: {
+                    input: text,
+                    voice: this.settings.voice,
+                    speed: this.settings.speed,
+                    model: this.settings.model,
+                    response_format: this.settings.responseFormat,
+                },
+                failedResponseHandler: failedOpenAICallResponseHandler,
+                successfulResponseHandler: createAudioMpegResponseHandler(),
+                abortSignal,
             }),
         });
     }
@@ -70,19 +81,3 @@ export class OpenAISpeechModel extends AbstractModel {
         });
     }
 }
-async function callOpenAITextToSpeechAPI({ api = new OpenAIApiConfiguration(), abortSignal, text, voice, model, speed, responseFormat, }) {
-    return postJsonToApi({
-        url: api.assembleUrl(`/audio/speech`),
-        headers: api.headers,
-        body: {
-            input: text,
-            voice,
-            speed,
-            model,
-            response_format: responseFormat,
-        },
-        failedResponseHandler: failedOpenAICallResponseHandler,
-        successfulResponseHandler: createAudioMpegResponseHandler(),
-        abortSignal,
-    });
-}
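In both speech variants the standalone `callOpenAITextToSpeechAPI` helper is removed; `callAPI` now reads `voice`, `speed`, `model`, and `responseFormat` directly from `this.settings`. Those settings are supplied when the model is constructed; a sketch with illustrative values:

```ts
import { openai } from "modelfusion";

// voice, speed, model, and responseFormat are the settings that callAPI now
// reads from this.settings (values below are illustrative).
const speechModel = openai.SpeechGenerator({
  model: "tts-1",
  voice: "alloy",
  speed: 1.0,
});
```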
package/model-provider/openai/OpenAITextEmbeddingModel.cjs

@@ -4,6 +4,7 @@ exports.OpenAITextEmbeddingModel = exports.calculateOpenAIEmbeddingCostInMillice
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
 const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
@@ -91,16 +92,24 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
         return (0, countTokens_js_1.countTokens)(this.tokenizer, input);
     }
     async callAPI(texts, options) {
+        const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
+        const abortSignal = options?.run?.abortSignal;
         return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
-            retry: …
-            throttle: …
-            call: async () => …
-…
-…
-…
-            : …
-…
-…
+            retry: api.retry,
+            throttle: api.throttle,
+            call: async () => (0, postToApi_js_1.postJsonToApi)({
+                url: api.assembleUrl("/embeddings"),
+                headers: api.headers,
+                body: {
+                    model: this.modelName,
+                    input: texts,
+                    user: this.settings.isUserIdForwardingEnabled
+                        ? options?.run?.userId
+                        : undefined,
+                },
+                failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+                successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITextEmbeddingResponseSchema)),
+                abortSignal,
             }),
         });
     }
@@ -135,17 +144,3 @@ const openAITextEmbeddingResponseSchema = zod_1.z.object({
         total_tokens: zod_1.z.number(),
     }),
 });
-async function callOpenAITextEmbeddingAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, model, input, user, }) {
-    return (0, postToApi_js_1.postJsonToApi)({
-        url: api.assembleUrl("/embeddings"),
-        headers: api.headers,
-        body: {
-            model,
-            input,
-            user,
-        },
-        failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
-        successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(openAITextEmbeddingResponseSchema),
-        abortSignal,
-    });
-}
package/model-provider/openai/OpenAITextEmbeddingModel.js

@@ -1,6 +1,7 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
 import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
@@ -86,16 +87,24 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
         return countTokens(this.tokenizer, input);
     }
     async callAPI(texts, options) {
+        const api = this.settings.api ?? new OpenAIApiConfiguration();
+        const abortSignal = options?.run?.abortSignal;
         return callWithRetryAndThrottle({
-            retry: …
-            throttle: …
-            call: async () => …
-…
-…
-…
-            : …
-…
-…
+            retry: api.retry,
+            throttle: api.throttle,
+            call: async () => postJsonToApi({
+                url: api.assembleUrl("/embeddings"),
+                headers: api.headers,
+                body: {
+                    model: this.modelName,
+                    input: texts,
+                    user: this.settings.isUserIdForwardingEnabled
+                        ? options?.run?.userId
+                        : undefined,
+                },
+                failedResponseHandler: failedOpenAICallResponseHandler,
+                successfulResponseHandler: createJsonResponseHandler(zodSchema(openAITextEmbeddingResponseSchema)),
+                abortSignal,
             }),
         });
     }
@@ -129,17 +138,3 @@ const openAITextEmbeddingResponseSchema = z.object({
         total_tokens: z.number(),
     }),
 });
-async function callOpenAITextEmbeddingAPI({ api = new OpenAIApiConfiguration(), abortSignal, model, input, user, }) {
-    return postJsonToApi({
-        url: api.assembleUrl("/embeddings"),
-        headers: api.headers,
-        body: {
-            model,
-            input,
-            user,
-        },
-        failedResponseHandler: failedOpenAICallResponseHandler,
-        successfulResponseHandler: createJsonResponseHandler(openAITextEmbeddingResponseSchema),
-        abortSignal,
-    });
-}
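The embedding variants follow the same pattern: the `callOpenAITextEmbeddingAPI` helper is removed, and the OpenAI `user` field is only sent when `isUserIdForwardingEnabled` is set, using the current run's userId. A sketch of the corresponding settings (the positional `embedMany` call style is an assumption for this release line):

```ts
import { embedMany, openai } from "modelfusion";

const embedder = openai.TextEmbedder({
  model: "text-embedding-ada-002",
  // When enabled, the run's userId is forwarded as OpenAI's `user` field;
  // otherwise the field is omitted (see the body construction above).
  isUserIdForwardingEnabled: true,
});

// Assumption: positional call style of the 0.1xx releases.
const embeddings = await embedMany(embedder, ["first text", "second text"]);
```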
package/model-provider/openai/OpenAITranscriptionModel.cjs

@@ -4,6 +4,7 @@ exports.OpenAITranscriptionResponseFormat = exports.OpenAITranscriptionModel = e
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
 const OpenAIError_js_1 = require("./OpenAIError.cjs");
@@ -65,19 +66,46 @@ class OpenAITranscriptionModel extends AbstractModel_js_1.AbstractModel {
         };
     }
     async callAPI(data, options) {
+        const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
+        const abortSignal = options?.run?.abortSignal;
         return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
-            retry: …
-            throttle: …
-            call: async () => …
-…
-…
-…
-…
-…
-…
-            }
-…
-…
+            retry: api.retry,
+            throttle: api.throttle,
+            call: async () => {
+                const fileName = `audio.${data.type}`;
+                const formData = new FormData();
+                formData.append("file", new Blob([data.data]), fileName);
+                formData.append("model", this.settings.model);
+                if (this.settings.prompt != null) {
+                    formData.append("prompt", this.settings.prompt);
+                }
+                if (options.responseFormat != null) {
+                    formData.append("response_format", options.responseFormat.type);
+                }
+                if (this.settings.temperature != null) {
+                    formData.append("temperature", this.settings.temperature.toString());
+                }
+                if (this.settings.language != null) {
+                    formData.append("language", this.settings.language);
+                }
+                return (0, postToApi_js_1.postToApi)({
+                    url: api.assembleUrl("/audio/transcriptions"),
+                    headers: api.headers,
+                    body: {
+                        content: formData,
+                        values: {
+                            model: this.settings.model,
+                            prompt: this.settings.prompt,
+                            response_format: options.responseFormat,
+                            temperature: this.settings.temperature,
+                            language: this.settings.language,
+                        },
+                    },
+                    failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
+                    successfulResponseHandler: options.responseFormat.handler,
+                    abortSignal,
+                });
+            },
         });
     }
     get settingsForEvent() {
@@ -91,40 +119,6 @@ class OpenAITranscriptionModel extends AbstractModel_js_1.AbstractModel {
     }
 }
 exports.OpenAITranscriptionModel = OpenAITranscriptionModel;
-async function callOpenAITranscriptionAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, model, file, prompt, responseFormat, temperature, language, }) {
-    const formData = new FormData();
-    formData.append("file", new Blob([file.data]), file.name);
-    formData.append("model", model);
-    if (prompt) {
-        formData.append("prompt", prompt);
-    }
-    if (responseFormat) {
-        formData.append("response_format", responseFormat.type);
-    }
-    if (temperature) {
-        formData.append("temperature", temperature.toString());
-    }
-    if (language) {
-        formData.append("language", language);
-    }
-    return (0, postToApi_js_1.postToApi)({
-        url: api.assembleUrl("/audio/transcriptions"),
-        headers: api.headers,
-        body: {
-            content: formData,
-            values: {
-                model,
-                prompt,
-                response_format: responseFormat,
-                temperature,
-                language,
-            },
-        },
-        failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
-        successfulResponseHandler: responseFormat.handler,
-        abortSignal,
-    });
-}
 const openAITranscriptionJsonSchema = zod_1.z.object({
     text: zod_1.z.string(),
 });
@@ -150,11 +144,11 @@ const openAITranscriptionVerboseJsonSchema = zod_1.z.object({
 exports.OpenAITranscriptionResponseFormat = {
     json: {
         type: "json",
-        handler: (0, postToApi_js_1.createJsonResponseHandler)(openAITranscriptionJsonSchema),
+        handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITranscriptionJsonSchema)),
     },
     verboseJson: {
         type: "verbose_json",
-        handler: (0, postToApi_js_1.createJsonResponseHandler)(openAITranscriptionVerboseJsonSchema),
+        handler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITranscriptionVerboseJsonSchema)),
     },
     text: {
         type: "text",