modelfusion 0.0.44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +429 -0
- package/composed-function/index.cjs +22 -0
- package/composed-function/index.d.ts +6 -0
- package/composed-function/index.js +6 -0
- package/composed-function/summarize/SummarizationFunction.cjs +2 -0
- package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
- package/composed-function/summarize/SummarizationFunction.js +1 -0
- package/composed-function/summarize/summarizeRecursively.cjs +19 -0
- package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
- package/composed-function/summarize/summarizeRecursively.js +15 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
- package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
- package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
- package/composed-function/use-tool/NoSuchToolError.js +13 -0
- package/composed-function/use-tool/Tool.cjs +43 -0
- package/composed-function/use-tool/Tool.d.ts +15 -0
- package/composed-function/use-tool/Tool.js +39 -0
- package/composed-function/use-tool/useTool.cjs +59 -0
- package/composed-function/use-tool/useTool.d.ts +36 -0
- package/composed-function/use-tool/useTool.js +54 -0
- package/cost/Cost.cjs +38 -0
- package/cost/Cost.d.ts +16 -0
- package/cost/Cost.js +34 -0
- package/cost/CostCalculator.cjs +2 -0
- package/cost/CostCalculator.d.ts +8 -0
- package/cost/CostCalculator.js +1 -0
- package/cost/calculateCost.cjs +28 -0
- package/cost/calculateCost.d.ts +7 -0
- package/cost/calculateCost.js +24 -0
- package/cost/index.cjs +19 -0
- package/cost/index.d.ts +3 -0
- package/cost/index.js +3 -0
- package/index.cjs +25 -0
- package/index.d.ts +9 -0
- package/index.js +9 -0
- package/model-function/AbstractModel.cjs +22 -0
- package/model-function/AbstractModel.d.ts +12 -0
- package/model-function/AbstractModel.js +18 -0
- package/model-function/FunctionOptions.cjs +2 -0
- package/model-function/FunctionOptions.d.ts +6 -0
- package/model-function/FunctionOptions.js +1 -0
- package/model-function/Model.cjs +2 -0
- package/model-function/Model.d.ts +23 -0
- package/model-function/Model.js +1 -0
- package/model-function/ModelCallEvent.cjs +2 -0
- package/model-function/ModelCallEvent.d.ts +18 -0
- package/model-function/ModelCallEvent.js +1 -0
- package/model-function/ModelCallEventSource.cjs +42 -0
- package/model-function/ModelCallEventSource.d.ts +13 -0
- package/model-function/ModelCallEventSource.js +38 -0
- package/model-function/ModelCallObserver.cjs +2 -0
- package/model-function/ModelCallObserver.d.ts +5 -0
- package/model-function/ModelCallObserver.js +1 -0
- package/model-function/ModelInformation.cjs +2 -0
- package/model-function/ModelInformation.d.ts +4 -0
- package/model-function/ModelInformation.js +1 -0
- package/model-function/SuccessfulModelCall.cjs +22 -0
- package/model-function/SuccessfulModelCall.d.ts +9 -0
- package/model-function/SuccessfulModelCall.js +18 -0
- package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
- package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
- package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
- package/model-function/embed-text/TextEmbeddingModel.js +1 -0
- package/model-function/embed-text/embedText.cjs +90 -0
- package/model-function/embed-text/embedText.d.ts +33 -0
- package/model-function/embed-text/embedText.js +85 -0
- package/model-function/executeCall.cjs +60 -0
- package/model-function/executeCall.d.ts +27 -0
- package/model-function/executeCall.js +56 -0
- package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
- package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
- package/model-function/generate-image/ImageGenerationEvent.js +1 -0
- package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
- package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
- package/model-function/generate-image/ImageGenerationModel.js +1 -0
- package/model-function/generate-image/generateImage.cjs +63 -0
- package/model-function/generate-image/generateImage.d.ts +23 -0
- package/model-function/generate-image/generateImage.js +59 -0
- package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
- package/model-function/generate-json/GenerateJsonModel.js +1 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
- package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
- package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
- package/model-function/generate-json/JsonGenerationEvent.js +1 -0
- package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
- package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
- package/model-function/generate-json/NoSuchSchemaError.js +13 -0
- package/model-function/generate-json/SchemaDefinition.cjs +2 -0
- package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
- package/model-function/generate-json/SchemaDefinition.js +1 -0
- package/model-function/generate-json/SchemaValidationError.cjs +36 -0
- package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
- package/model-function/generate-json/SchemaValidationError.js +32 -0
- package/model-function/generate-json/generateJson.cjs +61 -0
- package/model-function/generate-json/generateJson.d.ts +9 -0
- package/model-function/generate-json/generateJson.js +57 -0
- package/model-function/generate-json/generateJsonOrText.cjs +74 -0
- package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
- package/model-function/generate-json/generateJsonOrText.js +70 -0
- package/model-function/generate-text/AsyncQueue.cjs +66 -0
- package/model-function/generate-text/AsyncQueue.d.ts +17 -0
- package/model-function/generate-text/AsyncQueue.js +62 -0
- package/model-function/generate-text/DeltaEvent.cjs +2 -0
- package/model-function/generate-text/DeltaEvent.d.ts +7 -0
- package/model-function/generate-text/DeltaEvent.js +1 -0
- package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
- package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
- package/model-function/generate-text/TextDeltaEventSource.js +46 -0
- package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
- package/model-function/generate-text/TextGenerationEvent.js +1 -0
- package/model-function/generate-text/TextGenerationModel.cjs +2 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
- package/model-function/generate-text/TextGenerationModel.js +1 -0
- package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
- package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
- package/model-function/generate-text/TextStreamingEvent.js +1 -0
- package/model-function/generate-text/extractTextDeltas.cjs +23 -0
- package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
- package/model-function/generate-text/extractTextDeltas.js +19 -0
- package/model-function/generate-text/generateText.cjs +67 -0
- package/model-function/generate-text/generateText.d.ts +20 -0
- package/model-function/generate-text/generateText.js +63 -0
- package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
- package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
- package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
- package/model-function/generate-text/streamText.cjs +115 -0
- package/model-function/generate-text/streamText.d.ts +11 -0
- package/model-function/generate-text/streamText.js +111 -0
- package/model-function/index.cjs +47 -0
- package/model-function/index.d.ts +31 -0
- package/model-function/index.js +31 -0
- package/model-function/tokenize-text/Tokenizer.cjs +2 -0
- package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
- package/model-function/tokenize-text/Tokenizer.js +1 -0
- package/model-function/tokenize-text/countTokens.cjs +10 -0
- package/model-function/tokenize-text/countTokens.d.ts +5 -0
- package/model-function/tokenize-text/countTokens.js +6 -0
- package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
- package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
- package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
- package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
- package/model-function/transcribe-audio/transcribe.cjs +62 -0
- package/model-function/transcribe-audio/transcribe.d.ts +22 -0
- package/model-function/transcribe-audio/transcribe.js +58 -0
- package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
- package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
- package/model-provider/automatic1111/Automatic1111Error.js +31 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
- package/model-provider/automatic1111/index.cjs +20 -0
- package/model-provider/automatic1111/index.d.ts +2 -0
- package/model-provider/automatic1111/index.js +2 -0
- package/model-provider/cohere/CohereError.cjs +36 -0
- package/model-provider/cohere/CohereError.d.ts +22 -0
- package/model-provider/cohere/CohereError.js +28 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
- package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
- package/model-provider/cohere/CohereTokenizer.cjs +136 -0
- package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
- package/model-provider/cohere/CohereTokenizer.js +129 -0
- package/model-provider/cohere/index.cjs +22 -0
- package/model-provider/cohere/index.d.ts +4 -0
- package/model-provider/cohere/index.js +4 -0
- package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
- package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
- package/model-provider/huggingface/HuggingFaceError.js +44 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
- package/model-provider/huggingface/index.cjs +20 -0
- package/model-provider/huggingface/index.d.ts +2 -0
- package/model-provider/huggingface/index.js +2 -0
- package/model-provider/index.cjs +22 -0
- package/model-provider/index.d.ts +6 -0
- package/model-provider/index.js +6 -0
- package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
- package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
- package/model-provider/llamacpp/LlamaCppError.js +44 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
- package/model-provider/llamacpp/index.cjs +22 -0
- package/model-provider/llamacpp/index.d.ts +4 -0
- package/model-provider/llamacpp/index.js +4 -0
- package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
- package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
- package/model-provider/openai/OpenAICostCalculator.js +67 -0
- package/model-provider/openai/OpenAIError.cjs +50 -0
- package/model-provider/openai/OpenAIError.d.ts +47 -0
- package/model-provider/openai/OpenAIError.js +42 -0
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
- package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
- package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
- package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
- package/model-provider/openai/OpenAIModelSettings.js +1 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
- package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
- package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
- package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
- package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
- package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
- package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
- package/model-provider/openai/TikTokenTokenizer.js +82 -0
- package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
- package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
- package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
- package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
- package/model-provider/openai/index.cjs +31 -0
- package/model-provider/openai/index.d.ts +13 -0
- package/model-provider/openai/index.js +12 -0
- package/model-provider/stability/StabilityError.cjs +36 -0
- package/model-provider/stability/StabilityError.d.ts +22 -0
- package/model-provider/stability/StabilityError.js +28 -0
- package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
- package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
- package/model-provider/stability/index.cjs +20 -0
- package/model-provider/stability/index.d.ts +2 -0
- package/model-provider/stability/index.js +2 -0
- package/package.json +87 -0
- package/prompt/InstructionPrompt.cjs +2 -0
- package/prompt/InstructionPrompt.d.ts +7 -0
- package/prompt/InstructionPrompt.js +1 -0
- package/prompt/Llama2PromptMapping.cjs +56 -0
- package/prompt/Llama2PromptMapping.d.ts +10 -0
- package/prompt/Llama2PromptMapping.js +51 -0
- package/prompt/OpenAIChatPromptMapping.cjs +62 -0
- package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
- package/prompt/OpenAIChatPromptMapping.js +57 -0
- package/prompt/PromptMapping.cjs +2 -0
- package/prompt/PromptMapping.d.ts +7 -0
- package/prompt/PromptMapping.js +1 -0
- package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
- package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
- package/prompt/PromptMappingTextGenerationModel.js +84 -0
- package/prompt/TextPromptMapping.cjs +50 -0
- package/prompt/TextPromptMapping.d.ts +14 -0
- package/prompt/TextPromptMapping.js +45 -0
- package/prompt/chat/ChatPrompt.cjs +2 -0
- package/prompt/chat/ChatPrompt.d.ts +33 -0
- package/prompt/chat/ChatPrompt.js +1 -0
- package/prompt/chat/trimChatPrompt.cjs +50 -0
- package/prompt/chat/trimChatPrompt.d.ts +19 -0
- package/prompt/chat/trimChatPrompt.js +46 -0
- package/prompt/chat/validateChatPrompt.cjs +36 -0
- package/prompt/chat/validateChatPrompt.d.ts +8 -0
- package/prompt/chat/validateChatPrompt.js +31 -0
- package/prompt/index.cjs +25 -0
- package/prompt/index.d.ts +9 -0
- package/prompt/index.js +9 -0
- package/run/ConsoleLogger.cjs +12 -0
- package/run/ConsoleLogger.d.ts +6 -0
- package/run/ConsoleLogger.js +8 -0
- package/run/DefaultRun.cjs +78 -0
- package/run/DefaultRun.d.ts +24 -0
- package/run/DefaultRun.js +74 -0
- package/run/IdMetadata.cjs +2 -0
- package/run/IdMetadata.d.ts +7 -0
- package/run/IdMetadata.js +1 -0
- package/run/Run.cjs +2 -0
- package/run/Run.d.ts +27 -0
- package/run/Run.js +1 -0
- package/run/RunFunction.cjs +2 -0
- package/run/RunFunction.d.ts +13 -0
- package/run/RunFunction.js +1 -0
- package/run/Vector.cjs +2 -0
- package/run/Vector.d.ts +5 -0
- package/run/Vector.js +1 -0
- package/run/index.cjs +22 -0
- package/run/index.d.ts +6 -0
- package/run/index.js +6 -0
- package/text-chunk/TextChunk.cjs +2 -0
- package/text-chunk/TextChunk.d.ts +3 -0
- package/text-chunk/TextChunk.js +1 -0
- package/text-chunk/index.cjs +22 -0
- package/text-chunk/index.d.ts +6 -0
- package/text-chunk/index.js +6 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
- package/text-chunk/split/SplitFunction.cjs +2 -0
- package/text-chunk/split/SplitFunction.d.ts +4 -0
- package/text-chunk/split/SplitFunction.js +1 -0
- package/text-chunk/split/splitOnSeparator.cjs +12 -0
- package/text-chunk/split/splitOnSeparator.d.ts +8 -0
- package/text-chunk/split/splitOnSeparator.js +7 -0
- package/text-chunk/split/splitRecursively.cjs +41 -0
- package/text-chunk/split/splitRecursively.d.ts +22 -0
- package/text-chunk/split/splitRecursively.js +33 -0
- package/util/DurationMeasurement.cjs +42 -0
- package/util/DurationMeasurement.d.ts +5 -0
- package/util/DurationMeasurement.js +38 -0
- package/util/ErrorHandler.cjs +2 -0
- package/util/ErrorHandler.d.ts +1 -0
- package/util/ErrorHandler.js +1 -0
- package/util/SafeResult.cjs +2 -0
- package/util/SafeResult.d.ts +8 -0
- package/util/SafeResult.js +1 -0
- package/util/api/AbortError.cjs +9 -0
- package/util/api/AbortError.d.ts +3 -0
- package/util/api/AbortError.js +5 -0
- package/util/api/ApiCallError.cjs +45 -0
- package/util/api/ApiCallError.d.ts +15 -0
- package/util/api/ApiCallError.js +41 -0
- package/util/api/RetryError.cjs +24 -0
- package/util/api/RetryError.d.ts +10 -0
- package/util/api/RetryError.js +20 -0
- package/util/api/RetryFunction.cjs +2 -0
- package/util/api/RetryFunction.d.ts +1 -0
- package/util/api/RetryFunction.js +1 -0
- package/util/api/ThrottleFunction.cjs +2 -0
- package/util/api/ThrottleFunction.d.ts +1 -0
- package/util/api/ThrottleFunction.js +1 -0
- package/util/api/callWithRetryAndThrottle.cjs +7 -0
- package/util/api/callWithRetryAndThrottle.d.ts +7 -0
- package/util/api/callWithRetryAndThrottle.js +3 -0
- package/util/api/postToApi.cjs +103 -0
- package/util/api/postToApi.d.ts +29 -0
- package/util/api/postToApi.js +96 -0
- package/util/api/retryNever.cjs +8 -0
- package/util/api/retryNever.d.ts +4 -0
- package/util/api/retryNever.js +4 -0
- package/util/api/retryWithExponentialBackoff.cjs +48 -0
- package/util/api/retryWithExponentialBackoff.d.ts +10 -0
- package/util/api/retryWithExponentialBackoff.js +44 -0
- package/util/api/throttleMaxConcurrency.cjs +65 -0
- package/util/api/throttleMaxConcurrency.d.ts +7 -0
- package/util/api/throttleMaxConcurrency.js +61 -0
- package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
- package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
- package/util/api/throttleUnlimitedConcurrency.js +4 -0
- package/util/cosineSimilarity.cjs +26 -0
- package/util/cosineSimilarity.d.ts +11 -0
- package/util/cosineSimilarity.js +22 -0
- package/util/index.cjs +26 -0
- package/util/index.d.ts +10 -0
- package/util/index.js +10 -0
- package/util/never.cjs +6 -0
- package/util/never.d.ts +1 -0
- package/util/never.js +2 -0
- package/util/runSafe.cjs +15 -0
- package/util/runSafe.d.ts +2 -0
- package/util/runSafe.js +11 -0
- package/vector-index/VectorIndex.cjs +2 -0
- package/vector-index/VectorIndex.d.ts +18 -0
- package/vector-index/VectorIndex.js +1 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
- package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
- package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
- package/vector-index/VectorIndexTextChunkStore.js +73 -0
- package/vector-index/index.cjs +22 -0
- package/vector-index/index.d.ts +6 -0
- package/vector-index/index.js +6 -0
- package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
- package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
- package/vector-index/memory/MemoryVectorIndex.js +56 -0
- package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
- package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
- package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
- package/vector-index/upsertTextChunks.cjs +15 -0
- package/vector-index/upsertTextChunks.d.ts +11 -0
- package/vector-index/upsertTextChunks.js +11 -0
@@ -0,0 +1,187 @@
|
|
1
|
+
import z from "zod";
|
2
|
+
import { AbstractModel } from "../../model-function/AbstractModel.js";
|
3
|
+
import { callWithRetryAndThrottle } from "../../util/api/callWithRetryAndThrottle.js";
|
4
|
+
import { createJsonResponseHandler, createTextResponseHandler, postToApi, } from "../../util/api/postToApi.js";
|
5
|
+
import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
|
6
|
+
/**
 * Pricing table for OpenAI transcription models, in millicents per second of audio.
 *
 * @see https://openai.com/pricing
 */
export const OPENAI_TRANSCRIPTION_MODELS = {
    "whisper-1": {
        costInMillicentsPerSecond: 10, // = 600 / 60,
    },
};
/**
 * Computes the cost of a transcription call in millicents.
 * Billing rounds the audio duration up to whole seconds.
 * Returns null when the model has no known pricing.
 */
export const calculateOpenAITranscriptionCostInMillicents = ({ model, response, }) => {
    if (model === "whisper-1") {
        const billedSeconds = Math.ceil(response.duration);
        return billedSeconds * OPENAI_TRANSCRIPTION_MODELS[model].costInMillicentsPerSecond;
    }
    return null;
};
|
22
|
+
/**
 * Create a transcription model that calls the OpenAI transcription API.
 *
 * @see https://platform.openai.com/docs/api-reference/audio/create
 *
 * @example
 * const data = await fs.promises.readFile("data/test.mp3");
 *
 * const { transcription } = await transcribe(
 *   new OpenAITranscriptionModel({ model: "whisper-1" }),
 *   {
 *     type: "mp3",
 *     data,
 *   }
 * );
 */
export class OpenAITranscriptionModel extends AbstractModel {
    constructor(settings) {
        super({ settings });
        // Plain assignment creates the same enumerable/writable/configurable
        // data property as the compiled Object.defineProperty form.
        this.provider = "openai";
    }
    get modelName() {
        return this.settings.model;
    }
    // Always requests the verbose JSON format so that duration/segment
    // metadata is available (e.g. for cost calculation).
    generateTranscriptionResponse(data, options) {
        return this.callAPI(data, {
            responseFormat: OpenAITranscriptionResponseFormat.verboseJson,
            functionId: options?.functionId,
            settings: options?.settings,
            run: options?.run,
        });
    }
    extractTranscriptionText(response) {
        return response.text;
    }
    // Resolves the API key from the settings or the environment; throws
    // when neither is available.
    get apiKey() {
        const key = this.settings.apiKey ?? process.env.OPENAI_API_KEY;
        if (key == null) {
            throw new Error(`OpenAI API key is missing. Pass it as an argument to the constructor or set it as an environment variable named OPENAI_API_KEY.`);
        }
        return key;
    }
    async callAPI(data, options) {
        const { run, settings, responseFormat } = options ?? {};
        // Later spreads win: per-call settings override instance settings,
        // and both can override the derived apiKey default.
        const callSettings = {
            apiKey: this.apiKey,
            ...this.settings,
            ...settings,
            abortSignal: run?.abortSignal,
            file: {
                name: `audio.${data.type}`,
                data: data.data,
            },
            responseFormat,
        };
        return callWithRetryAndThrottle({
            retry: this.settings.retry,
            throttle: this.settings.throttle,
            call: async () => callOpenAITranscriptionAPI(callSettings),
        });
    }
    // Returns a new model instance with the merged settings (immutable update).
    withSettings(additionalSettings) {
        return new OpenAITranscriptionModel({ ...this.settings, ...additionalSettings });
    }
}
|
93
|
+
/**
 * Call the OpenAI Transcription API to generate a transcription from an audio file.
 *
 * @see https://platform.openai.com/docs/api-reference/audio/create
 *
 * @example
 * const transcriptionResponse = await callOpenAITranscriptionAPI({
 *   apiKey: openAiApiKey,
 *   model: "whisper-1",
 *   file: {
 *     name: "audio.mp3",
 *     data: fileData, // Buffer
 *   },
 *   responseFormat: callOpenAITranscriptionAPI.responseFormat.json,
 * });
 */
async function callOpenAITranscriptionAPI({ baseUrl = "https://api.openai.com/v1", abortSignal, apiKey, model, file, prompt, responseFormat, temperature, language, }) {
    const formData = new FormData();
    formData.append("file", new Blob([file.data]), file.name);
    formData.append("model", model);
    if (prompt) {
        formData.append("prompt", prompt);
    }
    if (responseFormat) {
        formData.append("response_format", responseFormat.type);
    }
    // Check against null/undefined instead of truthiness: temperature 0 is a
    // valid value (deterministic sampling) and must still be sent to the API.
    if (temperature != null) {
        formData.append("temperature", temperature.toString());
    }
    if (language) {
        formData.append("language", language);
    }
    return postToApi({
        url: `${baseUrl}/audio/transcriptions`,
        apiKey,
        contentType: null, // multipart boundary is derived from the FormData body
        body: {
            content: formData,
            values: {
                model,
                prompt,
                response_format: responseFormat,
                temperature,
                language,
            },
        },
        failedResponseHandler: failedOpenAICallResponseHandler,
        // NOTE(review): responseFormat is treated as optional above but
        // responseFormat.handler is read unconditionally here — callers
        // (callAPI) always supply it; confirm before calling this directly.
        successfulResponseHandler: responseFormat.handler,
        abortSignal,
    });
}
|
144
|
+
// Zod schema for the "json" response format: only the transcribed text.
const openAITranscriptionJsonSchema = z.object({
    text: z.string(),
});
|
147
|
+
// Zod schema for the "verbose_json" response format: full transcription
// metadata including per-segment decoding statistics.
const openAITranscriptionVerboseJsonSchema = z.object({
    task: z.literal("transcribe"),
    language: z.string(),
    duration: z.number(), // audio duration in seconds; read by the cost calculator
    segments: z.array(z.object({
        id: z.number(),
        seek: z.number(),
        start: z.number(),
        end: z.number(),
        text: z.string(),
        tokens: z.array(z.number()),
        temperature: z.number(),
        avg_logprob: z.number(),
        compression_ratio: z.number(),
        no_speech_prob: z.number(),
        transient: z.boolean().optional(),
    })),
    text: z.string(),
});
|
166
|
+
/**
 * Response formats supported by the OpenAI transcription API. Each entry
 * pairs the API's `response_format` value with a handler that parses the
 * corresponding HTTP response body.
 */
export const OpenAITranscriptionResponseFormat = {
    json: {
        type: "json",
        handler: createJsonResponseHandler(openAITranscriptionJsonSchema),
    },
    verboseJson: {
        type: "verbose_json",
        handler: createJsonResponseHandler(openAITranscriptionVerboseJsonSchema),
    },
    // text, srt, and vtt are returned by the API as plain text bodies.
    text: {
        type: "text",
        handler: createTextResponseHandler(),
    },
    srt: {
        type: "srt",
        handler: createTextResponseHandler(),
    },
    vtt: {
        type: "vtt",
        handler: createTextResponseHandler(),
    },
};
|
@@ -0,0 +1,86 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.TikTokenTokenizer = void 0;
|
4
|
+
const js_tiktoken_1 = require("js-tiktoken");
|
5
|
+
const never_js_1 = require("../../util/never.cjs");
|
6
|
+
/**
 * TikToken tokenizer for OpenAI language models.
 *
 * @see https://github.com/openai/tiktoken
 *
 * @example
 * const tokenizer = new TikTokenTokenizer({ model: "gpt-4" });
 *
 * const text = "At first, Nox didn't know what to do with the pup.";
 *
 * const tokenCount = await countTokens(tokenizer, text);
 * const tokens = await tokenizer.tokenize(text);
 * const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
 * const reconstructedText = await tokenizer.detokenize(tokens);
 */
class TikTokenTokenizer {
    /**
     * Get a TikToken tokenizer for a specific model or encoding.
     */
    constructor(options) {
        // Either an OpenAI model name (mapped to its encoding) or an
        // explicit encoding name can be supplied.
        const encodingName = "model" in options
            ? getEncodingNameForModel(options.model)
            : options.encoding;
        this.tiktoken = (0, js_tiktoken_1.getEncoding)(encodingName);
    }
    async tokenize(text) {
        return this.tiktoken.encode(text);
    }
    async tokenizeWithTexts(text) {
        const tokens = this.tiktoken.encode(text);
        // Decode each token individually to recover its text fragment.
        const tokenTexts = tokens.map((token) => this.tiktoken.decode([token]));
        return { tokens, tokenTexts };
    }
    async detokenize(tokens) {
        return this.tiktoken.decode(tokens);
    }
}
exports.TikTokenTokenizer = TikTokenTokenizer;
|
51
|
+
// implemented here (instead of using js-tiktoken) to be able to quickly update it
// when new models are released
function getEncodingNameForModel(model) {
    const p50kBaseModels = [
        "code-davinci-002",
        "text-davinci-002",
        "text-davinci-003",
    ];
    if (p50kBaseModels.includes(model)) {
        return "p50k_base";
    }
    // NOTE(review): the base GPT-3 models (ada/babbage/curie/davinci) are
    // mapped to cl100k_base here, mirroring the original switch — confirm
    // this is intentional, since tiktoken historically used r50k_base for them.
    const cl100kBaseModels = [
        "ada",
        "babbage",
        "curie",
        "davinci",
        "text-ada-001",
        "text-babbage-001",
        "text-curie-001",
        "gpt-3.5-turbo",
        "gpt-3.5-turbo-0301",
        "gpt-3.5-turbo-0613",
        "gpt-3.5-turbo-16k",
        "gpt-3.5-turbo-16k-0613",
        "gpt-4",
        "gpt-4-0314",
        "gpt-4-0613",
        "gpt-4-32k",
        "gpt-4-32k-0314",
        "gpt-4-32k-0613",
        "text-embedding-ada-002",
    ];
    if (cl100kBaseModels.includes(model)) {
        return "cl100k_base";
    }
    // Exhaustiveness guard: never() flags unhandled model types at compile
    // time; the throw covers unexpected values at runtime.
    (0, never_js_1.never)(model);
    throw new Error(`Unknown model: ${model}`);
}
|
@@ -0,0 +1,35 @@
|
|
1
|
+
import { TiktokenEncoding } from "js-tiktoken";
|
2
|
+
import { FullTokenizer } from "../../model-function/tokenize-text/Tokenizer.js";
|
3
|
+
import { OpenAIChatModelType, OpenAITextEmbeddingModelType, OpenAITextGenerationModelType } from "./index.js";
|
4
|
+
/**
 * TikToken tokenizer for OpenAI language models.
 *
 * @see https://github.com/openai/tiktoken
 *
 * @example
 * const tokenizer = new TikTokenTokenizer({ model: "gpt-4" });
 *
 * const text = "At first, Nox didn't know what to do with the pup.";
 *
 * const tokenCount = await countTokens(tokenizer, text);
 * const tokens = await tokenizer.tokenize(text);
 * const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
 * const reconstructedText = await tokenizer.detokenize(tokens);
 */
export declare class TikTokenTokenizer implements FullTokenizer {
    /**
     * Get a TikToken tokenizer for a specific model or encoding.
     * Exactly one of `model` (an OpenAI model id) or `encoding`
     * (a js-tiktoken encoding name) must be provided.
     */
    constructor(options: {
        model: OpenAIChatModelType | OpenAITextEmbeddingModelType | OpenAITextGenerationModelType;
    } | {
        encoding: TiktokenEncoding;
    });
    private readonly tiktoken;
    /** Encodes `text` into an array of token ids. */
    tokenize(text: string): Promise<number[]>;
    /**
     * Encodes `text` and additionally returns the text fragment each token
     * id decodes to; `tokens` and `tokenTexts` are parallel arrays.
     */
    tokenizeWithTexts(text: string): Promise<{
        tokens: number[];
        tokenTexts: string[];
    }>;
    /** Decodes an array of token ids back into a string. */
    detokenize(tokens: number[]): Promise<string>;
}
|
@@ -0,0 +1,82 @@
|
|
1
|
+
import { getEncoding } from "js-tiktoken";
|
2
|
+
import { never } from "../../util/never.js";
|
3
|
+
/**
 * TikToken tokenizer for OpenAI language models.
 *
 * @see https://github.com/openai/tiktoken
 *
 * @example
 * const tokenizer = new TikTokenTokenizer({ model: "gpt-4" });
 *
 * const text = "At first, Nox didn't know what to do with the pup.";
 *
 * const tokenCount = await countTokens(tokenizer, text);
 * const tokens = await tokenizer.tokenize(text);
 * const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
 * const reconstructedText = await tokenizer.detokenize(tokens);
 */
export class TikTokenTokenizer {
    /**
     * Get a TikToken tokenizer for a specific model or encoding.
     * When `options.model` is given, the encoding name is resolved via the
     * local model table; otherwise `options.encoding` is used directly.
     */
    constructor(options) {
        // Direct assignment creates the same enumerable/writable/configurable
        // own property as the compiled Object.defineProperty boilerplate.
        const encodingName = "model" in options
            ? getEncodingNameForModel(options.model)
            : options.encoding;
        this.tiktoken = getEncoding(encodingName);
    }
    // Encode `text` into an array of token ids.
    async tokenize(text) {
        return this.tiktoken.encode(text);
    }
    // Encode `text` and also decode each token id back into its text
    // fragment; `tokens` and `tokenTexts` are parallel arrays.
    async tokenizeWithTexts(text) {
        const tokens = this.tiktoken.encode(text);
        const tokenTexts = tokens.map((tokenId) => this.tiktoken.decode([tokenId]));
        return { tokens, tokenTexts };
    }
    // Decode an array of token ids back into a string.
    async detokenize(tokens) {
        return this.tiktoken.decode(tokens);
    }
}
|
47
|
+
// The model → encoding table is maintained here (instead of relying on
// js-tiktoken's own mapping) so that it can be updated quickly when new
// models are released.
function getEncodingNameForModel(model) {
    switch (model) {
        // GPT-3 base models and the -001 instruct models use the r50k_base
        // (gpt2-era) encoding per tiktoken's official model table.
        case "ada":
        case "babbage":
        case "curie":
        case "davinci":
        case "text-ada-001":
        case "text-babbage-001":
        case "text-curie-001": {
            return "r50k_base";
        }
        // Codex and the -002/-003 davinci models use p50k_base.
        case "code-davinci-002":
        case "text-davinci-002":
        case "text-davinci-003": {
            return "p50k_base";
        }
        // Chat and embedding models use cl100k_base.
        case "gpt-3.5-turbo":
        case "gpt-3.5-turbo-0301":
        case "gpt-3.5-turbo-0613":
        case "gpt-3.5-turbo-16k":
        case "gpt-3.5-turbo-16k-0613":
        case "gpt-4":
        case "gpt-4-0314":
        case "gpt-4-0613":
        case "gpt-4-32k":
        case "gpt-4-32k-0314":
        case "gpt-4-32k-0613":
        case "text-embedding-ada-002": {
            return "cl100k_base";
        }
        default: {
            // `never` enforces exhaustiveness in the TypeScript source;
            // the throw is a runtime safety net for untyped callers.
            never(model);
            throw new Error(`Unknown model: ${model}`);
        }
    }
}
|
@@ -0,0 +1,24 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.OpenAIChatMessage = void 0;
|
4
|
+
exports.OpenAIChatMessage = {
|
5
|
+
system(content) {
|
6
|
+
return { role: "system", content };
|
7
|
+
},
|
8
|
+
user(content) {
|
9
|
+
return { role: "user", content };
|
10
|
+
},
|
11
|
+
assistant(content) {
|
12
|
+
return { role: "assistant", content };
|
13
|
+
},
|
14
|
+
functionCall(content, functionCall) {
|
15
|
+
return {
|
16
|
+
role: "assistant",
|
17
|
+
content,
|
18
|
+
function_call: functionCall,
|
19
|
+
};
|
20
|
+
},
|
21
|
+
functionResult(name, content) {
|
22
|
+
return { role: "function", name, content };
|
23
|
+
},
|
24
|
+
};
|
@@ -0,0 +1,26 @@
|
|
1
|
+
/**
 * A chat message in the OpenAI API format: either a plain
 * user/assistant/system message, an assistant message carrying a
 * function call (whose `content` may be null), or a function-result
 * message.
 */
export type OpenAIChatMessage = {
    role: "user" | "assistant" | "system";
    content: string;
    name?: string;
} | {
    role: "assistant";
    content: string | null;
    function_call: {
        name: string;
        arguments: string;
    };
} | {
    role: "function";
    content: string;
    name: string;
};
/**
 * Factory helpers for constructing `OpenAIChatMessage` values.
 */
export declare const OpenAIChatMessage: {
    /** Creates a `{ role: "system" }` message. */
    system(content: string): OpenAIChatMessage;
    /** Creates a `{ role: "user" }` message. */
    user(content: string): OpenAIChatMessage;
    /** Creates a `{ role: "assistant" }` message. */
    assistant(content: string): OpenAIChatMessage;
    /**
     * Creates an assistant message that carries a function call.
     * `content` may be null for this variant; `arguments` is the raw
     * argument string as used by the API.
     */
    functionCall(content: string | null, functionCall: {
        name: string;
        arguments: string;
    }): OpenAIChatMessage;
    /** Creates a `{ role: "function" }` message holding a function's result. */
    functionResult(name: string, content: string): OpenAIChatMessage;
};
|
@@ -0,0 +1,21 @@
|
|
1
|
+
// Factory helpers for building messages in the OpenAI chat API format.
export const OpenAIChatMessage = {
    // Builds a { role: "system" } message.
    system: (content) => ({ role: "system", content }),
    // Builds a { role: "user" } message.
    user: (content) => ({ role: "user", content }),
    // Builds a { role: "assistant" } message.
    assistant: (content) => ({ role: "assistant", content }),
    // Builds an assistant message that carries a function_call payload;
    // `content` may be null in this variant.
    functionCall: (content, functionCall) => ({
        role: "assistant",
        content,
        function_call: functionCall,
    }),
    // Builds a { role: "function" } message holding a function's result.
    functionResult: (name, content) => ({ role: "function", name, content }),
};
|