modelfusion 0.0.44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +429 -0
- package/composed-function/index.cjs +22 -0
- package/composed-function/index.d.ts +6 -0
- package/composed-function/index.js +6 -0
- package/composed-function/summarize/SummarizationFunction.cjs +2 -0
- package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
- package/composed-function/summarize/SummarizationFunction.js +1 -0
- package/composed-function/summarize/summarizeRecursively.cjs +19 -0
- package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
- package/composed-function/summarize/summarizeRecursively.js +15 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
- package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
- package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
- package/composed-function/use-tool/NoSuchToolError.js +13 -0
- package/composed-function/use-tool/Tool.cjs +43 -0
- package/composed-function/use-tool/Tool.d.ts +15 -0
- package/composed-function/use-tool/Tool.js +39 -0
- package/composed-function/use-tool/useTool.cjs +59 -0
- package/composed-function/use-tool/useTool.d.ts +36 -0
- package/composed-function/use-tool/useTool.js +54 -0
- package/cost/Cost.cjs +38 -0
- package/cost/Cost.d.ts +16 -0
- package/cost/Cost.js +34 -0
- package/cost/CostCalculator.cjs +2 -0
- package/cost/CostCalculator.d.ts +8 -0
- package/cost/CostCalculator.js +1 -0
- package/cost/calculateCost.cjs +28 -0
- package/cost/calculateCost.d.ts +7 -0
- package/cost/calculateCost.js +24 -0
- package/cost/index.cjs +19 -0
- package/cost/index.d.ts +3 -0
- package/cost/index.js +3 -0
- package/index.cjs +25 -0
- package/index.d.ts +9 -0
- package/index.js +9 -0
- package/model-function/AbstractModel.cjs +22 -0
- package/model-function/AbstractModel.d.ts +12 -0
- package/model-function/AbstractModel.js +18 -0
- package/model-function/FunctionOptions.cjs +2 -0
- package/model-function/FunctionOptions.d.ts +6 -0
- package/model-function/FunctionOptions.js +1 -0
- package/model-function/Model.cjs +2 -0
- package/model-function/Model.d.ts +23 -0
- package/model-function/Model.js +1 -0
- package/model-function/ModelCallEvent.cjs +2 -0
- package/model-function/ModelCallEvent.d.ts +18 -0
- package/model-function/ModelCallEvent.js +1 -0
- package/model-function/ModelCallEventSource.cjs +42 -0
- package/model-function/ModelCallEventSource.d.ts +13 -0
- package/model-function/ModelCallEventSource.js +38 -0
- package/model-function/ModelCallObserver.cjs +2 -0
- package/model-function/ModelCallObserver.d.ts +5 -0
- package/model-function/ModelCallObserver.js +1 -0
- package/model-function/ModelInformation.cjs +2 -0
- package/model-function/ModelInformation.d.ts +4 -0
- package/model-function/ModelInformation.js +1 -0
- package/model-function/SuccessfulModelCall.cjs +22 -0
- package/model-function/SuccessfulModelCall.d.ts +9 -0
- package/model-function/SuccessfulModelCall.js +18 -0
- package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
- package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
- package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
- package/model-function/embed-text/TextEmbeddingModel.js +1 -0
- package/model-function/embed-text/embedText.cjs +90 -0
- package/model-function/embed-text/embedText.d.ts +33 -0
- package/model-function/embed-text/embedText.js +85 -0
- package/model-function/executeCall.cjs +60 -0
- package/model-function/executeCall.d.ts +27 -0
- package/model-function/executeCall.js +56 -0
- package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
- package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
- package/model-function/generate-image/ImageGenerationEvent.js +1 -0
- package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
- package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
- package/model-function/generate-image/ImageGenerationModel.js +1 -0
- package/model-function/generate-image/generateImage.cjs +63 -0
- package/model-function/generate-image/generateImage.d.ts +23 -0
- package/model-function/generate-image/generateImage.js +59 -0
- package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
- package/model-function/generate-json/GenerateJsonModel.js +1 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
- package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
- package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
- package/model-function/generate-json/JsonGenerationEvent.js +1 -0
- package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
- package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
- package/model-function/generate-json/NoSuchSchemaError.js +13 -0
- package/model-function/generate-json/SchemaDefinition.cjs +2 -0
- package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
- package/model-function/generate-json/SchemaDefinition.js +1 -0
- package/model-function/generate-json/SchemaValidationError.cjs +36 -0
- package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
- package/model-function/generate-json/SchemaValidationError.js +32 -0
- package/model-function/generate-json/generateJson.cjs +61 -0
- package/model-function/generate-json/generateJson.d.ts +9 -0
- package/model-function/generate-json/generateJson.js +57 -0
- package/model-function/generate-json/generateJsonOrText.cjs +74 -0
- package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
- package/model-function/generate-json/generateJsonOrText.js +70 -0
- package/model-function/generate-text/AsyncQueue.cjs +66 -0
- package/model-function/generate-text/AsyncQueue.d.ts +17 -0
- package/model-function/generate-text/AsyncQueue.js +62 -0
- package/model-function/generate-text/DeltaEvent.cjs +2 -0
- package/model-function/generate-text/DeltaEvent.d.ts +7 -0
- package/model-function/generate-text/DeltaEvent.js +1 -0
- package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
- package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
- package/model-function/generate-text/TextDeltaEventSource.js +46 -0
- package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
- package/model-function/generate-text/TextGenerationEvent.js +1 -0
- package/model-function/generate-text/TextGenerationModel.cjs +2 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
- package/model-function/generate-text/TextGenerationModel.js +1 -0
- package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
- package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
- package/model-function/generate-text/TextStreamingEvent.js +1 -0
- package/model-function/generate-text/extractTextDeltas.cjs +23 -0
- package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
- package/model-function/generate-text/extractTextDeltas.js +19 -0
- package/model-function/generate-text/generateText.cjs +67 -0
- package/model-function/generate-text/generateText.d.ts +20 -0
- package/model-function/generate-text/generateText.js +63 -0
- package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
- package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
- package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
- package/model-function/generate-text/streamText.cjs +115 -0
- package/model-function/generate-text/streamText.d.ts +11 -0
- package/model-function/generate-text/streamText.js +111 -0
- package/model-function/index.cjs +47 -0
- package/model-function/index.d.ts +31 -0
- package/model-function/index.js +31 -0
- package/model-function/tokenize-text/Tokenizer.cjs +2 -0
- package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
- package/model-function/tokenize-text/Tokenizer.js +1 -0
- package/model-function/tokenize-text/countTokens.cjs +10 -0
- package/model-function/tokenize-text/countTokens.d.ts +5 -0
- package/model-function/tokenize-text/countTokens.js +6 -0
- package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
- package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
- package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
- package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
- package/model-function/transcribe-audio/transcribe.cjs +62 -0
- package/model-function/transcribe-audio/transcribe.d.ts +22 -0
- package/model-function/transcribe-audio/transcribe.js +58 -0
- package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
- package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
- package/model-provider/automatic1111/Automatic1111Error.js +31 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
- package/model-provider/automatic1111/index.cjs +20 -0
- package/model-provider/automatic1111/index.d.ts +2 -0
- package/model-provider/automatic1111/index.js +2 -0
- package/model-provider/cohere/CohereError.cjs +36 -0
- package/model-provider/cohere/CohereError.d.ts +22 -0
- package/model-provider/cohere/CohereError.js +28 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
- package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
- package/model-provider/cohere/CohereTokenizer.cjs +136 -0
- package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
- package/model-provider/cohere/CohereTokenizer.js +129 -0
- package/model-provider/cohere/index.cjs +22 -0
- package/model-provider/cohere/index.d.ts +4 -0
- package/model-provider/cohere/index.js +4 -0
- package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
- package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
- package/model-provider/huggingface/HuggingFaceError.js +44 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
- package/model-provider/huggingface/index.cjs +20 -0
- package/model-provider/huggingface/index.d.ts +2 -0
- package/model-provider/huggingface/index.js +2 -0
- package/model-provider/index.cjs +22 -0
- package/model-provider/index.d.ts +6 -0
- package/model-provider/index.js +6 -0
- package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
- package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
- package/model-provider/llamacpp/LlamaCppError.js +44 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
- package/model-provider/llamacpp/index.cjs +22 -0
- package/model-provider/llamacpp/index.d.ts +4 -0
- package/model-provider/llamacpp/index.js +4 -0
- package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
- package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
- package/model-provider/openai/OpenAICostCalculator.js +67 -0
- package/model-provider/openai/OpenAIError.cjs +50 -0
- package/model-provider/openai/OpenAIError.d.ts +47 -0
- package/model-provider/openai/OpenAIError.js +42 -0
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
- package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
- package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
- package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
- package/model-provider/openai/OpenAIModelSettings.js +1 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
- package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
- package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
- package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
- package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
- package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
- package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
- package/model-provider/openai/TikTokenTokenizer.js +82 -0
- package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
- package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
- package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
- package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
- package/model-provider/openai/index.cjs +31 -0
- package/model-provider/openai/index.d.ts +13 -0
- package/model-provider/openai/index.js +12 -0
- package/model-provider/stability/StabilityError.cjs +36 -0
- package/model-provider/stability/StabilityError.d.ts +22 -0
- package/model-provider/stability/StabilityError.js +28 -0
- package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
- package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
- package/model-provider/stability/index.cjs +20 -0
- package/model-provider/stability/index.d.ts +2 -0
- package/model-provider/stability/index.js +2 -0
- package/package.json +87 -0
- package/prompt/InstructionPrompt.cjs +2 -0
- package/prompt/InstructionPrompt.d.ts +7 -0
- package/prompt/InstructionPrompt.js +1 -0
- package/prompt/Llama2PromptMapping.cjs +56 -0
- package/prompt/Llama2PromptMapping.d.ts +10 -0
- package/prompt/Llama2PromptMapping.js +51 -0
- package/prompt/OpenAIChatPromptMapping.cjs +62 -0
- package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
- package/prompt/OpenAIChatPromptMapping.js +57 -0
- package/prompt/PromptMapping.cjs +2 -0
- package/prompt/PromptMapping.d.ts +7 -0
- package/prompt/PromptMapping.js +1 -0
- package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
- package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
- package/prompt/PromptMappingTextGenerationModel.js +84 -0
- package/prompt/TextPromptMapping.cjs +50 -0
- package/prompt/TextPromptMapping.d.ts +14 -0
- package/prompt/TextPromptMapping.js +45 -0
- package/prompt/chat/ChatPrompt.cjs +2 -0
- package/prompt/chat/ChatPrompt.d.ts +33 -0
- package/prompt/chat/ChatPrompt.js +1 -0
- package/prompt/chat/trimChatPrompt.cjs +50 -0
- package/prompt/chat/trimChatPrompt.d.ts +19 -0
- package/prompt/chat/trimChatPrompt.js +46 -0
- package/prompt/chat/validateChatPrompt.cjs +36 -0
- package/prompt/chat/validateChatPrompt.d.ts +8 -0
- package/prompt/chat/validateChatPrompt.js +31 -0
- package/prompt/index.cjs +25 -0
- package/prompt/index.d.ts +9 -0
- package/prompt/index.js +9 -0
- package/run/ConsoleLogger.cjs +12 -0
- package/run/ConsoleLogger.d.ts +6 -0
- package/run/ConsoleLogger.js +8 -0
- package/run/DefaultRun.cjs +78 -0
- package/run/DefaultRun.d.ts +24 -0
- package/run/DefaultRun.js +74 -0
- package/run/IdMetadata.cjs +2 -0
- package/run/IdMetadata.d.ts +7 -0
- package/run/IdMetadata.js +1 -0
- package/run/Run.cjs +2 -0
- package/run/Run.d.ts +27 -0
- package/run/Run.js +1 -0
- package/run/RunFunction.cjs +2 -0
- package/run/RunFunction.d.ts +13 -0
- package/run/RunFunction.js +1 -0
- package/run/Vector.cjs +2 -0
- package/run/Vector.d.ts +5 -0
- package/run/Vector.js +1 -0
- package/run/index.cjs +22 -0
- package/run/index.d.ts +6 -0
- package/run/index.js +6 -0
- package/text-chunk/TextChunk.cjs +2 -0
- package/text-chunk/TextChunk.d.ts +3 -0
- package/text-chunk/TextChunk.js +1 -0
- package/text-chunk/index.cjs +22 -0
- package/text-chunk/index.d.ts +6 -0
- package/text-chunk/index.js +6 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
- package/text-chunk/split/SplitFunction.cjs +2 -0
- package/text-chunk/split/SplitFunction.d.ts +4 -0
- package/text-chunk/split/SplitFunction.js +1 -0
- package/text-chunk/split/splitOnSeparator.cjs +12 -0
- package/text-chunk/split/splitOnSeparator.d.ts +8 -0
- package/text-chunk/split/splitOnSeparator.js +7 -0
- package/text-chunk/split/splitRecursively.cjs +41 -0
- package/text-chunk/split/splitRecursively.d.ts +22 -0
- package/text-chunk/split/splitRecursively.js +33 -0
- package/util/DurationMeasurement.cjs +42 -0
- package/util/DurationMeasurement.d.ts +5 -0
- package/util/DurationMeasurement.js +38 -0
- package/util/ErrorHandler.cjs +2 -0
- package/util/ErrorHandler.d.ts +1 -0
- package/util/ErrorHandler.js +1 -0
- package/util/SafeResult.cjs +2 -0
- package/util/SafeResult.d.ts +8 -0
- package/util/SafeResult.js +1 -0
- package/util/api/AbortError.cjs +9 -0
- package/util/api/AbortError.d.ts +3 -0
- package/util/api/AbortError.js +5 -0
- package/util/api/ApiCallError.cjs +45 -0
- package/util/api/ApiCallError.d.ts +15 -0
- package/util/api/ApiCallError.js +41 -0
- package/util/api/RetryError.cjs +24 -0
- package/util/api/RetryError.d.ts +10 -0
- package/util/api/RetryError.js +20 -0
- package/util/api/RetryFunction.cjs +2 -0
- package/util/api/RetryFunction.d.ts +1 -0
- package/util/api/RetryFunction.js +1 -0
- package/util/api/ThrottleFunction.cjs +2 -0
- package/util/api/ThrottleFunction.d.ts +1 -0
- package/util/api/ThrottleFunction.js +1 -0
- package/util/api/callWithRetryAndThrottle.cjs +7 -0
- package/util/api/callWithRetryAndThrottle.d.ts +7 -0
- package/util/api/callWithRetryAndThrottle.js +3 -0
- package/util/api/postToApi.cjs +103 -0
- package/util/api/postToApi.d.ts +29 -0
- package/util/api/postToApi.js +96 -0
- package/util/api/retryNever.cjs +8 -0
- package/util/api/retryNever.d.ts +4 -0
- package/util/api/retryNever.js +4 -0
- package/util/api/retryWithExponentialBackoff.cjs +48 -0
- package/util/api/retryWithExponentialBackoff.d.ts +10 -0
- package/util/api/retryWithExponentialBackoff.js +44 -0
- package/util/api/throttleMaxConcurrency.cjs +65 -0
- package/util/api/throttleMaxConcurrency.d.ts +7 -0
- package/util/api/throttleMaxConcurrency.js +61 -0
- package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
- package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
- package/util/api/throttleUnlimitedConcurrency.js +4 -0
- package/util/cosineSimilarity.cjs +26 -0
- package/util/cosineSimilarity.d.ts +11 -0
- package/util/cosineSimilarity.js +22 -0
- package/util/index.cjs +26 -0
- package/util/index.d.ts +10 -0
- package/util/index.js +10 -0
- package/util/never.cjs +6 -0
- package/util/never.d.ts +1 -0
- package/util/never.js +2 -0
- package/util/runSafe.cjs +15 -0
- package/util/runSafe.d.ts +2 -0
- package/util/runSafe.js +11 -0
- package/vector-index/VectorIndex.cjs +2 -0
- package/vector-index/VectorIndex.d.ts +18 -0
- package/vector-index/VectorIndex.js +1 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
- package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
- package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
- package/vector-index/VectorIndexTextChunkStore.js +73 -0
- package/vector-index/index.cjs +22 -0
- package/vector-index/index.d.ts +6 -0
- package/vector-index/index.js +6 -0
- package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
- package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
- package/vector-index/memory/MemoryVectorIndex.js +56 -0
- package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
- package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
- package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
- package/vector-index/upsertTextChunks.cjs +15 -0
- package/vector-index/upsertTextChunks.d.ts +11 -0
- package/vector-index/upsertTextChunks.js +11 -0
@@ -0,0 +1,119 @@
|
|
1
|
+
import { z } from "zod";
|
2
|
+
import { AbstractModel } from "../../model-function/AbstractModel.js";
|
3
|
+
import { callWithRetryAndThrottle } from "../../util/api/callWithRetryAndThrottle.js";
|
4
|
+
import { createJsonResponseHandler, postJsonToApi, } from "../../util/api/postToApi.js";
|
5
|
+
import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
|
6
|
+
/**
 * Cost per generated image in millicents, keyed by output resolution.
 *
 * @see https://openai.com/pricing
 */
const IMAGE_COST_BY_SIZE_IN_MILLICENTS = {
    "1024x1024": 2000,
    "512x512": 1800,
    "256x256": 1600,
};
/**
 * Computes the total cost (in millicents) of an OpenAI image generation call:
 * the per-image price for the configured size multiplied by the number of
 * requested images (`n` defaults to 1, `size` defaults to "1024x1024").
 */
export const calculateOpenAIImageGenerationCostInMillicents = ({ settings }) => {
    const imageCount = settings.n ?? 1;
    const size = settings.size ?? "1024x1024";
    return imageCount * IMAGE_COST_BY_SIZE_IN_MILLICENTS[size];
};
/**
 * Create an image generation model that calls the OpenAI AI image creation API.
 *
 * @see https://platform.openai.com/docs/api-reference/images/create
 *
 * @example
 * const { image } = await generateImage(
 *   new OpenAIImageGenerationModel({ size: "512x512" }),
 *   "the wicked witch of the west in the style of early 19th century painting"
 * );
 */
export class OpenAIImageGenerationModel extends AbstractModel {
    // Own enumerable instance properties (class fields use Define semantics,
    // matching the Object.defineProperty calls of the compiled original).
    provider = "openai";
    modelName = null; // the image endpoint has no selectable model name

    constructor(settings) {
        super({ settings });
    }

    /**
     * Resolves the API key from the settings or the OPENAI_API_KEY
     * environment variable; throws when neither is present.
     */
    get apiKey() {
        const key = this.settings.apiKey ?? process.env.OPENAI_API_KEY;
        if (key == null) {
            throw new Error(`OpenAI API key is missing. Pass it as an argument to the constructor or set it as an environment variable named OPENAI_API_KEY.`);
        }
        return key;
    }

    /**
     * Performs the raw API call, merging instance settings with per-call
     * options and applying the configured retry/throttle policies.
     */
    async callAPI(prompt, options) {
        const run = options?.run;
        const callSettings = {
            apiKey: this.apiKey,
            // forward the user id only when explicitly enabled
            user: this.settings.isUserIdForwardingEnabled ? run?.userId : undefined,
            ...this.settings,
            ...options?.settings,
            abortSignal: run?.abortSignal,
            prompt,
            responseFormat: options?.responseFormat,
        };
        return callWithRetryAndThrottle({
            retry: callSettings.retry,
            throttle: callSettings.throttle,
            call: async () => callOpenAIImageGenerationAPI(callSettings),
        });
    }

    // Requests the image as base64 JSON (the format generateImage consumes).
    generateImageResponse(prompt, options) {
        return this.callAPI(prompt, {
            responseFormat: OpenAIImageGenerationResponseFormat.base64Json,
            functionId: options?.functionId,
            settings: options?.settings,
            run: options?.run,
        });
    }

    // Extracts the base64-encoded image from the first response entry.
    extractBase64Image(response) {
        return response.data[0].b64_json;
    }

    // Returns a copy of this model with the additional settings merged in.
    withSettings(additionalSettings) {
        return new OpenAIImageGenerationModel({ ...this.settings, ...additionalSettings });
    }
}
/**
 * The two response formats supported by the OpenAI image generation API.
 * Each entry pairs the `response_format` value that is sent to the API with
 * a JSON response handler that validates the corresponding response shape.
 */
export const OpenAIImageGenerationResponseFormat = {
    // The API returns URLs pointing to the generated images.
    url: {
        type: "url",
        handler: createJsonResponseHandler(z.object({
            created: z.number(),
            data: z.array(z.object({
                url: z.string(),
            })),
        })),
    },
    // The API returns the generated images base64-encoded in the JSON body.
    base64Json: {
        type: "b64_json",
        handler: createJsonResponseHandler(z.object({
            created: z.number(),
            data: z.array(z.object({
                b64_json: z.string(),
            })),
        })),
    },
};
/**
 * Posts an image generation request to the OpenAI API.
 *
 * @param baseUrl - API base URL (defaults to the public OpenAI endpoint).
 * @param abortSignal - optional signal that cancels the in-flight request.
 * @param apiKey - OpenAI API key used for authentication.
 * @param prompt - text description of the desired image(s).
 * @param n - number of images to generate (API default applies when undefined).
 * @param size - output resolution, e.g. "1024x1024".
 * @param responseFormat - entry from OpenAIImageGenerationResponseFormat;
 *   selects the `response_format` sent to the API and the response handler.
 * @param user - optional end-user id forwarded to OpenAI for abuse monitoring.
 * @returns the parsed, schema-validated API response.
 */
async function callOpenAIImageGenerationAPI({ baseUrl = "https://api.openai.com/v1", abortSignal, apiKey, prompt, n, size, responseFormat, user, }) {
    return postJsonToApi({
        url: `${baseUrl}/images/generations`,
        apiKey,
        body: {
            prompt,
            n,
            size,
            response_format: responseFormat.type,
            user,
        },
        failedResponseHandler: failedOpenAICallResponseHandler,
        // `responseFormat` is effectively required: accessing `.type` above
        // already throws on a nullish value, so the optional chaining the
        // original used here (`responseFormat?.handler`) was dead code and
        // has been removed for consistency.
        successfulResponseHandler: responseFormat.handler,
        abortSignal,
    });
}
@@ -0,0 +1,8 @@
|
|
1
|
+
import { RetryFunction } from "../../util/api/RetryFunction.js";
|
2
|
+
import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
|
3
|
+
/**
 * Settings shared by the OpenAI model integrations.
 */
export interface OpenAIModelSettings {
    /** Base URL of the OpenAI API; the public endpoint is used when omitted. */
    baseUrl?: string;
    /** API key; when omitted, implementations fall back to the OPENAI_API_KEY environment variable. */
    apiKey?: string;
    /** Retry strategy applied to failed API calls. */
    retry?: RetryFunction;
    /** Throttle strategy that limits concurrent API calls. */
    throttle?: ThrottleFunction;
}
@@ -0,0 +1 @@
|
|
1
|
+
export {};
|
@@ -0,0 +1,171 @@
|
|
1
|
+
"use strict";
// CommonJS interop helper emitted by the TypeScript compiler: wraps a
// non-ES-module export object so it can be accessed via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare the named exports (compiler-generated pattern).
exports.OpenAITextEmbeddingModel = exports.calculateOpenAIEmbeddingCostInMillicents = exports.isOpenAIEmbeddingModel = exports.OPENAI_TEXT_EMBEDDING_MODELS = void 0;
const zod_1 = __importDefault(require("zod"));
const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
const callWithRetryAndThrottle_js_1 = require("../../util/api/callWithRetryAndThrottle.cjs");
const postToApi_js_1 = require("../../util/api/postToApi.cjs");
const OpenAIError_js_1 = require("./OpenAIError.cjs");
const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
exports.OPENAI_TEXT_EMBEDDING_MODELS = {
|
15
|
+
"text-embedding-ada-002": {
|
16
|
+
contextWindowSize: 8192,
|
17
|
+
embeddingDimensions: 1536,
|
18
|
+
tokenCostInMillicents: 0.01,
|
19
|
+
},
|
20
|
+
};
|
21
|
+
const isOpenAIEmbeddingModel = (model) => model in exports.OPENAI_TEXT_EMBEDDING_MODELS;
|
22
|
+
exports.isOpenAIEmbeddingModel = isOpenAIEmbeddingModel;
|
23
|
+
/**
 * Calculates the total cost (in millicents) of a set of OpenAI embedding API
 * responses: each response's `usage.total_tokens` multiplied by the model's
 * per-token cost, summed over all responses.
 *
 * (The accumulator was previously misnamed `amountInMilliseconds`; the unit
 * is millicents.)
 */
const calculateOpenAIEmbeddingCostInMillicents = ({ model, responses, }) => {
    // Hoist the loop-invariant per-token cost lookup.
    const tokenCostInMillicents = exports.OPENAI_TEXT_EMBEDDING_MODELS[model].tokenCostInMillicents;
    let amountInMillicents = 0;
    for (const response of responses) {
        amountInMillicents += response.usage.total_tokens * tokenCostInMillicents;
    }
    return amountInMillicents;
};
exports.calculateOpenAIEmbeddingCostInMillicents = calculateOpenAIEmbeddingCostInMillicents;
/**
 * Create a text embedding model that calls the OpenAI embedding API.
 *
 * @see https://platform.openai.com/docs/api-reference/embeddings
 *
 * @example
 * const { embeddings } = await embedTexts(
 *   new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
 *   [
 *     "At first, Nox didn't know what to do with the pup.",
 *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
 *   ]
 * );
 */
class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
    // Own enumerable instance properties (class fields use Define semantics,
    // matching the Object.defineProperty calls of the compiled original).
    provider = "openai";
    // The embedding call accepts a single text per request.
    maxTextsPerCall = 1;
    embeddingDimensions = undefined;
    tokenizer = undefined;
    contextWindowSize = undefined;
    constructor(settings) {
        super({ settings });
        const modelInformation = exports.OPENAI_TEXT_EMBEDDING_MODELS[this.modelName];
        this.tokenizer = new TikTokenTokenizer_js_1.TikTokenTokenizer({ model: this.modelName });
        this.contextWindowSize = modelInformation.contextWindowSize;
        this.embeddingDimensions = modelInformation.embeddingDimensions;
    }
    get modelName() {
        return this.settings.model;
    }
    /**
     * Resolves the API key from the settings or the OPENAI_API_KEY
     * environment variable; throws when neither is present.
     */
    get apiKey() {
        const apiKey = this.settings.apiKey ?? process.env.OPENAI_API_KEY;
        if (apiKey == null) {
            throw new Error(`OpenAI API key is missing. Pass it as an argument to the constructor or set it as an environment variable named OPENAI_API_KEY.`);
        }
        return apiKey;
    }
    // Counts tokens using the model's TikToken tokenizer.
    async countTokens(input) {
        return (0, countTokens_js_1.countTokens)(this.tokenizer, input);
    }
    /**
     * Performs the raw API call, merging instance settings with per-call
     * options and applying the configured retry/throttle policies.
     */
    async callAPI(text, options) {
        const run = options?.run;
        const callSettings = {
            apiKey: this.apiKey,
            // forward the user id only when explicitly enabled
            user: this.settings.isUserIdForwardingEnabled ? run?.userId : undefined,
            ...this.settings,
            ...options?.settings,
            abortSignal: run?.abortSignal,
            input: text,
        };
        return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
            retry: this.settings.retry,
            throttle: this.settings.throttle,
            call: async () => callOpenAITextEmbeddingAPI(callSettings),
        });
    }
    // Embeds the single supported text; rejects oversized batches.
    generateEmbeddingResponse(texts, options) {
        if (texts.length > this.maxTextsPerCall) {
            throw new Error(`The OpenAI embedding API only supports ${this.maxTextsPerCall} texts per API call.`);
        }
        return this.callAPI(texts[0], options);
    }
    // Wraps the single returned embedding vector in an array.
    extractEmbeddings(response) {
        return [response.data[0].embedding];
    }
    // Returns a copy of this model with the additional settings merged in.
    withSettings(additionalSettings) {
        return new OpenAITextEmbeddingModel({ ...this.settings, ...additionalSettings });
    }
}
exports.OpenAITextEmbeddingModel = OpenAITextEmbeddingModel;
// Shape of one entry of the `data` array in a successful embeddings response.
const embeddingItemSchema = zod_1.default.object({
    object: zod_1.default.literal("embedding"),
    embedding: zod_1.default.array(zod_1.default.number()),
    index: zod_1.default.number(),
});
// Token accounting block returned by the API.
const tokenUsageSchema = zod_1.default.object({
    prompt_tokens: zod_1.default.number(),
    total_tokens: zod_1.default.number(),
});
// Full response schema. `data` must contain exactly one entry because this
// model sends a single input text per request.
const openAITextEmbeddingResponseSchema = zod_1.default.object({
    object: zod_1.default.literal("list"),
    data: zod_1.default.array(embeddingItemSchema).length(1),
    model: zod_1.default.string(),
    usage: tokenUsageSchema,
});
|
144
|
+
/**
 * Sends one request to the OpenAI embeddings endpoint and returns the parsed,
 * schema-validated response.
 *
 * @see https://platform.openai.com/docs/api-reference/embeddings
 *
 * @example
 * const response = await callOpenAITextEmbeddingAPI({
 *   apiKey: OPENAI_API_KEY,
 *   model: "text-embedding-ada-002",
 *   input: "At first, Nox didn't know what to do with the pup.",
 * });
 *
 * console.log(response.data[0].embedding);
 */
async function callOpenAITextEmbeddingAPI({ baseUrl = "https://api.openai.com/v1", abortSignal, apiKey, model, input, user, }) {
    // Assemble the request pieces up front for readability.
    const url = `${baseUrl}/embeddings`;
    const body = { model, input, user };
    return (0, postToApi_js_1.postJsonToApi)({
        url,
        apiKey,
        body,
        failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
        successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(openAITextEmbeddingResponseSchema),
        abortSignal,
    });
}
|
@@ -0,0 +1,122 @@
|
|
1
|
+
import z from "zod";
|
2
|
+
import { AbstractModel } from "../../model-function/AbstractModel.js";
|
3
|
+
import { FunctionOptions } from "../../model-function/FunctionOptions.js";
|
4
|
+
import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../../model-function/embed-text/TextEmbeddingModel.js";
|
5
|
+
import { RetryFunction } from "../../util/api/RetryFunction.js";
|
6
|
+
import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
|
7
|
+
import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
|
8
|
+
/** Capability and pricing table for the OpenAI embedding models supported here. */
export declare const OPENAI_TEXT_EMBEDDING_MODELS: {
    "text-embedding-ada-002": {
        contextWindowSize: number;
        embeddingDimensions: number;
        tokenCostInMillicents: number;
    };
};
/** Union of the model names listed in OPENAI_TEXT_EMBEDDING_MODELS. */
export type OpenAITextEmbeddingModelType = keyof typeof OPENAI_TEXT_EMBEDDING_MODELS;
/** Type guard: narrows a model name string to a supported embedding model. */
export declare const isOpenAIEmbeddingModel: (model: string) => model is "text-embedding-ada-002";
/** Sums the cost (in millicents) of a batch of embedding API responses. */
export declare const calculateOpenAIEmbeddingCostInMillicents: ({ model, responses, }: {
    model: OpenAITextEmbeddingModelType;
    responses: OpenAITextEmbeddingResponse[];
}) => number;
/** Settings for {@link OpenAITextEmbeddingModel}. */
export interface OpenAITextEmbeddingModelSettings extends TextEmbeddingModelSettings {
    model: OpenAITextEmbeddingModelType;
    /** API base URL; the implementation defaults to "https://api.openai.com/v1". */
    baseUrl?: string;
    /** Falls back to the OPENAI_API_KEY environment variable when omitted. */
    apiKey?: string;
    retry?: RetryFunction;
    throttle?: ThrottleFunction;
    /** When true, the run's userId is forwarded as the API `user` field. */
    isUserIdForwardingEnabled?: boolean;
}
|
29
|
+
/**
 * Create a text embedding model that calls the OpenAI embedding API.
 *
 * @see https://platform.openai.com/docs/api-reference/embeddings
 *
 * @example
 * const { embeddings } = await embedTexts(
 *   new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
 *   [
 *     "At first, Nox didn't know what to do with the pup.",
 *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
 *   ]
 * );
 */
export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEmbeddingModelSettings> implements TextEmbeddingModel<OpenAITextEmbeddingResponse, OpenAITextEmbeddingModelSettings> {
    constructor(settings: OpenAITextEmbeddingModelSettings);
    /** Provider identifier ("openai"). */
    readonly provider: "openai";
    get modelName(): "text-embedding-ada-002";
    /** The implementation sends exactly one text per API call. */
    readonly maxTextsPerCall = 1;
    /** Dimensionality of the returned embedding vectors (from the model table). */
    readonly embeddingDimensions: number;
    /** TikToken tokenizer matching the configured model. */
    readonly tokenizer: TikTokenTokenizer;
    /** Maximum input size in tokens (from the model table). */
    readonly contextWindowSize: number;
    private get apiKey();
    /** Counts tokens in `input` using the model's tokenizer. */
    countTokens(input: string): Promise<number>;
    /** Calls the embeddings endpoint for one text, with retry/throttle applied. */
    callAPI(text: string, options?: FunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<OpenAITextEmbeddingResponse>;
    /** Embeds at most `maxTextsPerCall` texts; throws when given more. */
    generateEmbeddingResponse(texts: string[], options?: FunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<{
        object: "list";
        model: string;
        data: {
            object: "embedding";
            embedding: number[];
            index: number;
        }[];
        usage: {
            prompt_tokens: number;
            total_tokens: number;
        };
    }>;
    /** Extracts the embedding vectors from a validated API response. */
    extractEmbeddings(response: OpenAITextEmbeddingResponse): number[][];
    /** Returns a copy of this model with `additionalSettings` merged in. */
    withSettings(additionalSettings: OpenAITextEmbeddingModelSettings): this;
}
|
70
|
+
/**
 * Zod schema type for a successful OpenAI embeddings response (the schema
 * value itself is private to the implementation file; only the inferred
 * response type is exported below).
 */
declare const openAITextEmbeddingResponseSchema: z.ZodObject<{
    object: z.ZodLiteral<"list">;
    data: z.ZodArray<z.ZodObject<{
        object: z.ZodLiteral<"embedding">;
        embedding: z.ZodArray<z.ZodNumber, "many">;
        index: z.ZodNumber;
    }, "strip", z.ZodTypeAny, {
        object: "embedding";
        embedding: number[];
        index: number;
    }, {
        object: "embedding";
        embedding: number[];
        index: number;
    }>, "many">;
    model: z.ZodString;
    usage: z.ZodObject<{
        prompt_tokens: z.ZodNumber;
        total_tokens: z.ZodNumber;
    }, "strip", z.ZodTypeAny, {
        prompt_tokens: number;
        total_tokens: number;
    }, {
        prompt_tokens: number;
        total_tokens: number;
    }>;
}, "strip", z.ZodTypeAny, {
    object: "list";
    model: string;
    data: {
        object: "embedding";
        embedding: number[];
        index: number;
    }[];
    usage: {
        prompt_tokens: number;
        total_tokens: number;
    };
}, {
    object: "list";
    model: string;
    data: {
        object: "embedding";
        embedding: number[];
        index: number;
    }[];
    usage: {
        prompt_tokens: number;
        total_tokens: number;
    };
}>;
/** Parsed, validated response of the OpenAI embeddings endpoint. */
export type OpenAITextEmbeddingResponse = z.infer<typeof openAITextEmbeddingResponseSchema>;
export {};
|
@@ -0,0 +1,162 @@
|
|
1
|
+
import z from "zod";
|
2
|
+
import { AbstractModel } from "../../model-function/AbstractModel.js";
|
3
|
+
import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
|
4
|
+
import { callWithRetryAndThrottle } from "../../util/api/callWithRetryAndThrottle.js";
|
5
|
+
import { createJsonResponseHandler, postJsonToApi, } from "../../util/api/postToApi.js";
|
6
|
+
import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
|
7
|
+
import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
|
8
|
+
/**
 * Capability and pricing metadata for the supported OpenAI embedding models.
 * `tokenCostInMillicents` is the price per input token.
 */
export const OPENAI_TEXT_EMBEDDING_MODELS = {
    "text-embedding-ada-002": {
        contextWindowSize: 8192,
        embeddingDimensions: 1536,
        tokenCostInMillicents: 0.01,
    },
};
/**
 * Checks whether `model` names a supported OpenAI embedding model.
 *
 * Uses an own-property check: the previous `model in OPENAI_TEXT_EMBEDDING_MODELS`
 * also matched inherited Object.prototype keys, so e.g. "toString" was
 * incorrectly reported as a valid model name.
 *
 * @param {string} model - candidate model name
 * @returns {boolean} true iff the name is a key of OPENAI_TEXT_EMBEDDING_MODELS
 */
export const isOpenAIEmbeddingModel = (model) => Object.hasOwn(OPENAI_TEXT_EMBEDDING_MODELS, model);
/**
 * Sums the embedding cost in millicents over a batch of API responses,
 * based on each response's `usage.total_tokens`.
 *
 * @param {{ model: string, responses: Array<{usage: {total_tokens: number}}> }} args
 * @returns {number} total cost in millicents (0 for an empty batch)
 */
export const calculateOpenAIEmbeddingCostInMillicents = ({ model, responses, }) => {
    // Renamed from the misleading "amountInMilliseconds": this is money, not time.
    let costInMillicents = 0;
    for (const response of responses) {
        costInMillicents +=
            response.usage.total_tokens *
                OPENAI_TEXT_EMBEDDING_MODELS[model].tokenCostInMillicents;
    }
    return costInMillicents;
};
|
25
|
+
/**
 * Create a text embedding model that calls the OpenAI embedding API.
 *
 * @see https://platform.openai.com/docs/api-reference/embeddings
 *
 * @example
 * const { embeddings } = await embedTexts(
 *   new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
 *   [
 *     "At first, Nox didn't know what to do with the pup.",
 *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
 *   ]
 * );
 */
export class OpenAITextEmbeddingModel extends AbstractModel {
    constructor(settings) {
        super({ settings });
        // tsc-lowered class fields ([[Define]] semantics via Object.defineProperty);
        // do not replace with plain assignments without checking AbstractModel
        // for accessors of the same names.
        // Provider identifier ("openai").
        Object.defineProperty(this, "provider", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: "openai"
        });
        // This implementation sends exactly one text per API call.
        Object.defineProperty(this, "maxTextsPerCall", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: 1
        });
        // The three fields below are initialized from the model table /
        // tokenizer at the end of the constructor.
        Object.defineProperty(this, "embeddingDimensions", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "tokenizer", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "contextWindowSize", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        // Derive tokenizer and model metadata from the configured model name.
        this.tokenizer = new TikTokenTokenizer({ model: this.modelName });
        this.contextWindowSize =
            OPENAI_TEXT_EMBEDDING_MODELS[this.modelName].contextWindowSize;
        this.embeddingDimensions =
            OPENAI_TEXT_EMBEDDING_MODELS[this.modelName].embeddingDimensions;
    }
    get modelName() {
        return this.settings.model;
    }
    // Resolves the API key from settings, falling back to the OPENAI_API_KEY
    // environment variable; throws if neither is set.
    get apiKey() {
        const apiKey = this.settings.apiKey ?? process.env.OPENAI_API_KEY;
        if (apiKey == null) {
            throw new Error(`OpenAI API key is missing. Pass it as an argument to the constructor or set it as an environment variable named OPENAI_API_KEY.`);
        }
        return apiKey;
    }
    // Counts the tokens in `input` using the model's TikToken tokenizer.
    async countTokens(input) {
        return countTokens(this.tokenizer, input);
    }
    // Performs one embeddings API call for a single text, wrapped with the
    // configured retry/throttle behavior. Per-call `options.settings` override
    // the instance settings; abort signal and user id come from `options.run`.
    async callAPI(text, options) {
        const run = options?.run;
        const settings = options?.settings;
        // Merge order: defaults (apiKey, user) < instance settings < per-call
        // settings < fixed per-call values (abortSignal, input).
        const callSettings = Object.assign({
            apiKey: this.apiKey,
            user: this.settings.isUserIdForwardingEnabled ? run?.userId : undefined,
        }, this.settings, settings, {
            abortSignal: run?.abortSignal,
            input: text,
        });
        return callWithRetryAndThrottle({
            retry: this.settings.retry,
            throttle: this.settings.throttle,
            call: async () => callOpenAITextEmbeddingAPI(callSettings),
        });
    }
    // Embeds the single allowed text; rejects batches larger than maxTextsPerCall.
    // NOTE(review): an empty `texts` array passes the guard and sends `undefined`
    // as input — confirm callers never pass [].
    generateEmbeddingResponse(texts, options) {
        if (texts.length > this.maxTextsPerCall) {
            throw new Error(`The OpenAI embedding API only supports ${this.maxTextsPerCall} texts per API call.`);
        }
        return this.callAPI(texts[0], options);
    }
    // The response schema guarantees exactly one data entry (.length(1)).
    extractEmbeddings(response) {
        return [response.data[0].embedding];
    }
    // Returns a new model instance with `additionalSettings` merged over the
    // current settings (non-mutating update).
    withSettings(additionalSettings) {
        return new OpenAITextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
    }
}
|
120
|
+
// Shape of one entry of the `data` array in a successful embeddings response.
const embeddingDataSchema = z.object({
    object: z.literal("embedding"),
    embedding: z.array(z.number()),
    index: z.number(),
});
// Token accounting block returned by the API.
const usageStatsSchema = z.object({
    prompt_tokens: z.number(),
    total_tokens: z.number(),
});
// Full response schema. `data` must contain exactly one entry because this
// model sends a single input text per request.
const openAITextEmbeddingResponseSchema = z.object({
    object: z.literal("list"),
    data: z.array(embeddingDataSchema).length(1),
    model: z.string(),
    usage: usageStatsSchema,
});
|
135
|
+
/**
 * Sends one request to the OpenAI embeddings endpoint and returns the parsed,
 * schema-validated response.
 *
 * @see https://platform.openai.com/docs/api-reference/embeddings
 *
 * @example
 * const response = await callOpenAITextEmbeddingAPI({
 *   apiKey: OPENAI_API_KEY,
 *   model: "text-embedding-ada-002",
 *   input: "At first, Nox didn't know what to do with the pup.",
 * });
 *
 * console.log(response.data[0].embedding);
 */
async function callOpenAITextEmbeddingAPI({ baseUrl = "https://api.openai.com/v1", abortSignal, apiKey, model, input, user, }) {
    // Assemble the request pieces up front for readability.
    const url = `${baseUrl}/embeddings`;
    const body = { model, input, user };
    return postJsonToApi({
        url,
        apiKey,
        body,
        failedResponseHandler: failedOpenAICallResponseHandler,
        successfulResponseHandler: createJsonResponseHandler(openAITextEmbeddingResponseSchema),
        abortSignal,
    });
}
|