modelfusion 0.0.44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +429 -0
- package/composed-function/index.cjs +22 -0
- package/composed-function/index.d.ts +6 -0
- package/composed-function/index.js +6 -0
- package/composed-function/summarize/SummarizationFunction.cjs +2 -0
- package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
- package/composed-function/summarize/SummarizationFunction.js +1 -0
- package/composed-function/summarize/summarizeRecursively.cjs +19 -0
- package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
- package/composed-function/summarize/summarizeRecursively.js +15 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
- package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
- package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
- package/composed-function/use-tool/NoSuchToolError.js +13 -0
- package/composed-function/use-tool/Tool.cjs +43 -0
- package/composed-function/use-tool/Tool.d.ts +15 -0
- package/composed-function/use-tool/Tool.js +39 -0
- package/composed-function/use-tool/useTool.cjs +59 -0
- package/composed-function/use-tool/useTool.d.ts +36 -0
- package/composed-function/use-tool/useTool.js +54 -0
- package/cost/Cost.cjs +38 -0
- package/cost/Cost.d.ts +16 -0
- package/cost/Cost.js +34 -0
- package/cost/CostCalculator.cjs +2 -0
- package/cost/CostCalculator.d.ts +8 -0
- package/cost/CostCalculator.js +1 -0
- package/cost/calculateCost.cjs +28 -0
- package/cost/calculateCost.d.ts +7 -0
- package/cost/calculateCost.js +24 -0
- package/cost/index.cjs +19 -0
- package/cost/index.d.ts +3 -0
- package/cost/index.js +3 -0
- package/index.cjs +25 -0
- package/index.d.ts +9 -0
- package/index.js +9 -0
- package/model-function/AbstractModel.cjs +22 -0
- package/model-function/AbstractModel.d.ts +12 -0
- package/model-function/AbstractModel.js +18 -0
- package/model-function/FunctionOptions.cjs +2 -0
- package/model-function/FunctionOptions.d.ts +6 -0
- package/model-function/FunctionOptions.js +1 -0
- package/model-function/Model.cjs +2 -0
- package/model-function/Model.d.ts +23 -0
- package/model-function/Model.js +1 -0
- package/model-function/ModelCallEvent.cjs +2 -0
- package/model-function/ModelCallEvent.d.ts +18 -0
- package/model-function/ModelCallEvent.js +1 -0
- package/model-function/ModelCallEventSource.cjs +42 -0
- package/model-function/ModelCallEventSource.d.ts +13 -0
- package/model-function/ModelCallEventSource.js +38 -0
- package/model-function/ModelCallObserver.cjs +2 -0
- package/model-function/ModelCallObserver.d.ts +5 -0
- package/model-function/ModelCallObserver.js +1 -0
- package/model-function/ModelInformation.cjs +2 -0
- package/model-function/ModelInformation.d.ts +4 -0
- package/model-function/ModelInformation.js +1 -0
- package/model-function/SuccessfulModelCall.cjs +22 -0
- package/model-function/SuccessfulModelCall.d.ts +9 -0
- package/model-function/SuccessfulModelCall.js +18 -0
- package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
- package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
- package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
- package/model-function/embed-text/TextEmbeddingModel.js +1 -0
- package/model-function/embed-text/embedText.cjs +90 -0
- package/model-function/embed-text/embedText.d.ts +33 -0
- package/model-function/embed-text/embedText.js +85 -0
- package/model-function/executeCall.cjs +60 -0
- package/model-function/executeCall.d.ts +27 -0
- package/model-function/executeCall.js +56 -0
- package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
- package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
- package/model-function/generate-image/ImageGenerationEvent.js +1 -0
- package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
- package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
- package/model-function/generate-image/ImageGenerationModel.js +1 -0
- package/model-function/generate-image/generateImage.cjs +63 -0
- package/model-function/generate-image/generateImage.d.ts +23 -0
- package/model-function/generate-image/generateImage.js +59 -0
- package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
- package/model-function/generate-json/GenerateJsonModel.js +1 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
- package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
- package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
- package/model-function/generate-json/JsonGenerationEvent.js +1 -0
- package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
- package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
- package/model-function/generate-json/NoSuchSchemaError.js +13 -0
- package/model-function/generate-json/SchemaDefinition.cjs +2 -0
- package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
- package/model-function/generate-json/SchemaDefinition.js +1 -0
- package/model-function/generate-json/SchemaValidationError.cjs +36 -0
- package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
- package/model-function/generate-json/SchemaValidationError.js +32 -0
- package/model-function/generate-json/generateJson.cjs +61 -0
- package/model-function/generate-json/generateJson.d.ts +9 -0
- package/model-function/generate-json/generateJson.js +57 -0
- package/model-function/generate-json/generateJsonOrText.cjs +74 -0
- package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
- package/model-function/generate-json/generateJsonOrText.js +70 -0
- package/model-function/generate-text/AsyncQueue.cjs +66 -0
- package/model-function/generate-text/AsyncQueue.d.ts +17 -0
- package/model-function/generate-text/AsyncQueue.js +62 -0
- package/model-function/generate-text/DeltaEvent.cjs +2 -0
- package/model-function/generate-text/DeltaEvent.d.ts +7 -0
- package/model-function/generate-text/DeltaEvent.js +1 -0
- package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
- package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
- package/model-function/generate-text/TextDeltaEventSource.js +46 -0
- package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
- package/model-function/generate-text/TextGenerationEvent.js +1 -0
- package/model-function/generate-text/TextGenerationModel.cjs +2 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
- package/model-function/generate-text/TextGenerationModel.js +1 -0
- package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
- package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
- package/model-function/generate-text/TextStreamingEvent.js +1 -0
- package/model-function/generate-text/extractTextDeltas.cjs +23 -0
- package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
- package/model-function/generate-text/extractTextDeltas.js +19 -0
- package/model-function/generate-text/generateText.cjs +67 -0
- package/model-function/generate-text/generateText.d.ts +20 -0
- package/model-function/generate-text/generateText.js +63 -0
- package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
- package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
- package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
- package/model-function/generate-text/streamText.cjs +115 -0
- package/model-function/generate-text/streamText.d.ts +11 -0
- package/model-function/generate-text/streamText.js +111 -0
- package/model-function/index.cjs +47 -0
- package/model-function/index.d.ts +31 -0
- package/model-function/index.js +31 -0
- package/model-function/tokenize-text/Tokenizer.cjs +2 -0
- package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
- package/model-function/tokenize-text/Tokenizer.js +1 -0
- package/model-function/tokenize-text/countTokens.cjs +10 -0
- package/model-function/tokenize-text/countTokens.d.ts +5 -0
- package/model-function/tokenize-text/countTokens.js +6 -0
- package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
- package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
- package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
- package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
- package/model-function/transcribe-audio/transcribe.cjs +62 -0
- package/model-function/transcribe-audio/transcribe.d.ts +22 -0
- package/model-function/transcribe-audio/transcribe.js +58 -0
- package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
- package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
- package/model-provider/automatic1111/Automatic1111Error.js +31 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
- package/model-provider/automatic1111/index.cjs +20 -0
- package/model-provider/automatic1111/index.d.ts +2 -0
- package/model-provider/automatic1111/index.js +2 -0
- package/model-provider/cohere/CohereError.cjs +36 -0
- package/model-provider/cohere/CohereError.d.ts +22 -0
- package/model-provider/cohere/CohereError.js +28 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
- package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
- package/model-provider/cohere/CohereTokenizer.cjs +136 -0
- package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
- package/model-provider/cohere/CohereTokenizer.js +129 -0
- package/model-provider/cohere/index.cjs +22 -0
- package/model-provider/cohere/index.d.ts +4 -0
- package/model-provider/cohere/index.js +4 -0
- package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
- package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
- package/model-provider/huggingface/HuggingFaceError.js +44 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
- package/model-provider/huggingface/index.cjs +20 -0
- package/model-provider/huggingface/index.d.ts +2 -0
- package/model-provider/huggingface/index.js +2 -0
- package/model-provider/index.cjs +22 -0
- package/model-provider/index.d.ts +6 -0
- package/model-provider/index.js +6 -0
- package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
- package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
- package/model-provider/llamacpp/LlamaCppError.js +44 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
- package/model-provider/llamacpp/index.cjs +22 -0
- package/model-provider/llamacpp/index.d.ts +4 -0
- package/model-provider/llamacpp/index.js +4 -0
- package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
- package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
- package/model-provider/openai/OpenAICostCalculator.js +67 -0
- package/model-provider/openai/OpenAIError.cjs +50 -0
- package/model-provider/openai/OpenAIError.d.ts +47 -0
- package/model-provider/openai/OpenAIError.js +42 -0
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
- package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
- package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
- package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
- package/model-provider/openai/OpenAIModelSettings.js +1 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
- package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
- package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
- package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
- package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
- package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
- package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
- package/model-provider/openai/TikTokenTokenizer.js +82 -0
- package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
- package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
- package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
- package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
- package/model-provider/openai/index.cjs +31 -0
- package/model-provider/openai/index.d.ts +13 -0
- package/model-provider/openai/index.js +12 -0
- package/model-provider/stability/StabilityError.cjs +36 -0
- package/model-provider/stability/StabilityError.d.ts +22 -0
- package/model-provider/stability/StabilityError.js +28 -0
- package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
- package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
- package/model-provider/stability/index.cjs +20 -0
- package/model-provider/stability/index.d.ts +2 -0
- package/model-provider/stability/index.js +2 -0
- package/package.json +87 -0
- package/prompt/InstructionPrompt.cjs +2 -0
- package/prompt/InstructionPrompt.d.ts +7 -0
- package/prompt/InstructionPrompt.js +1 -0
- package/prompt/Llama2PromptMapping.cjs +56 -0
- package/prompt/Llama2PromptMapping.d.ts +10 -0
- package/prompt/Llama2PromptMapping.js +51 -0
- package/prompt/OpenAIChatPromptMapping.cjs +62 -0
- package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
- package/prompt/OpenAIChatPromptMapping.js +57 -0
- package/prompt/PromptMapping.cjs +2 -0
- package/prompt/PromptMapping.d.ts +7 -0
- package/prompt/PromptMapping.js +1 -0
- package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
- package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
- package/prompt/PromptMappingTextGenerationModel.js +84 -0
- package/prompt/TextPromptMapping.cjs +50 -0
- package/prompt/TextPromptMapping.d.ts +14 -0
- package/prompt/TextPromptMapping.js +45 -0
- package/prompt/chat/ChatPrompt.cjs +2 -0
- package/prompt/chat/ChatPrompt.d.ts +33 -0
- package/prompt/chat/ChatPrompt.js +1 -0
- package/prompt/chat/trimChatPrompt.cjs +50 -0
- package/prompt/chat/trimChatPrompt.d.ts +19 -0
- package/prompt/chat/trimChatPrompt.js +46 -0
- package/prompt/chat/validateChatPrompt.cjs +36 -0
- package/prompt/chat/validateChatPrompt.d.ts +8 -0
- package/prompt/chat/validateChatPrompt.js +31 -0
- package/prompt/index.cjs +25 -0
- package/prompt/index.d.ts +9 -0
- package/prompt/index.js +9 -0
- package/run/ConsoleLogger.cjs +12 -0
- package/run/ConsoleLogger.d.ts +6 -0
- package/run/ConsoleLogger.js +8 -0
- package/run/DefaultRun.cjs +78 -0
- package/run/DefaultRun.d.ts +24 -0
- package/run/DefaultRun.js +74 -0
- package/run/IdMetadata.cjs +2 -0
- package/run/IdMetadata.d.ts +7 -0
- package/run/IdMetadata.js +1 -0
- package/run/Run.cjs +2 -0
- package/run/Run.d.ts +27 -0
- package/run/Run.js +1 -0
- package/run/RunFunction.cjs +2 -0
- package/run/RunFunction.d.ts +13 -0
- package/run/RunFunction.js +1 -0
- package/run/Vector.cjs +2 -0
- package/run/Vector.d.ts +5 -0
- package/run/Vector.js +1 -0
- package/run/index.cjs +22 -0
- package/run/index.d.ts +6 -0
- package/run/index.js +6 -0
- package/text-chunk/TextChunk.cjs +2 -0
- package/text-chunk/TextChunk.d.ts +3 -0
- package/text-chunk/TextChunk.js +1 -0
- package/text-chunk/index.cjs +22 -0
- package/text-chunk/index.d.ts +6 -0
- package/text-chunk/index.js +6 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
- package/text-chunk/split/SplitFunction.cjs +2 -0
- package/text-chunk/split/SplitFunction.d.ts +4 -0
- package/text-chunk/split/SplitFunction.js +1 -0
- package/text-chunk/split/splitOnSeparator.cjs +12 -0
- package/text-chunk/split/splitOnSeparator.d.ts +8 -0
- package/text-chunk/split/splitOnSeparator.js +7 -0
- package/text-chunk/split/splitRecursively.cjs +41 -0
- package/text-chunk/split/splitRecursively.d.ts +22 -0
- package/text-chunk/split/splitRecursively.js +33 -0
- package/util/DurationMeasurement.cjs +42 -0
- package/util/DurationMeasurement.d.ts +5 -0
- package/util/DurationMeasurement.js +38 -0
- package/util/ErrorHandler.cjs +2 -0
- package/util/ErrorHandler.d.ts +1 -0
- package/util/ErrorHandler.js +1 -0
- package/util/SafeResult.cjs +2 -0
- package/util/SafeResult.d.ts +8 -0
- package/util/SafeResult.js +1 -0
- package/util/api/AbortError.cjs +9 -0
- package/util/api/AbortError.d.ts +3 -0
- package/util/api/AbortError.js +5 -0
- package/util/api/ApiCallError.cjs +45 -0
- package/util/api/ApiCallError.d.ts +15 -0
- package/util/api/ApiCallError.js +41 -0
- package/util/api/RetryError.cjs +24 -0
- package/util/api/RetryError.d.ts +10 -0
- package/util/api/RetryError.js +20 -0
- package/util/api/RetryFunction.cjs +2 -0
- package/util/api/RetryFunction.d.ts +1 -0
- package/util/api/RetryFunction.js +1 -0
- package/util/api/ThrottleFunction.cjs +2 -0
- package/util/api/ThrottleFunction.d.ts +1 -0
- package/util/api/ThrottleFunction.js +1 -0
- package/util/api/callWithRetryAndThrottle.cjs +7 -0
- package/util/api/callWithRetryAndThrottle.d.ts +7 -0
- package/util/api/callWithRetryAndThrottle.js +3 -0
- package/util/api/postToApi.cjs +103 -0
- package/util/api/postToApi.d.ts +29 -0
- package/util/api/postToApi.js +96 -0
- package/util/api/retryNever.cjs +8 -0
- package/util/api/retryNever.d.ts +4 -0
- package/util/api/retryNever.js +4 -0
- package/util/api/retryWithExponentialBackoff.cjs +48 -0
- package/util/api/retryWithExponentialBackoff.d.ts +10 -0
- package/util/api/retryWithExponentialBackoff.js +44 -0
- package/util/api/throttleMaxConcurrency.cjs +65 -0
- package/util/api/throttleMaxConcurrency.d.ts +7 -0
- package/util/api/throttleMaxConcurrency.js +61 -0
- package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
- package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
- package/util/api/throttleUnlimitedConcurrency.js +4 -0
- package/util/cosineSimilarity.cjs +26 -0
- package/util/cosineSimilarity.d.ts +11 -0
- package/util/cosineSimilarity.js +22 -0
- package/util/index.cjs +26 -0
- package/util/index.d.ts +10 -0
- package/util/index.js +10 -0
- package/util/never.cjs +6 -0
- package/util/never.d.ts +1 -0
- package/util/never.js +2 -0
- package/util/runSafe.cjs +15 -0
- package/util/runSafe.d.ts +2 -0
- package/util/runSafe.js +11 -0
- package/vector-index/VectorIndex.cjs +2 -0
- package/vector-index/VectorIndex.d.ts +18 -0
- package/vector-index/VectorIndex.js +1 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
- package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
- package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
- package/vector-index/VectorIndexTextChunkStore.js +73 -0
- package/vector-index/index.cjs +22 -0
- package/vector-index/index.d.ts +6 -0
- package/vector-index/index.js +6 -0
- package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
- package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
- package/vector-index/memory/MemoryVectorIndex.js +56 -0
- package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
- package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
- package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
- package/vector-index/upsertTextChunks.cjs +15 -0
- package/vector-index/upsertTextChunks.d.ts +11 -0
- package/vector-index/upsertTextChunks.js +11 -0
@@ -0,0 +1,66 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AsyncQueue = void 0;
+/**
+ * @internal
+ */
+class AsyncQueue {
+    constructor() {
+        Object.defineProperty(this, "queue", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "resolvers", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "closed", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.queue = [];
+        this.resolvers = [];
+        this.closed = false;
+    }
+    push(value) {
+        if (this.closed) {
+            throw new Error("Pushing to a closed queue");
+        }
+        const resolve = this.resolvers.shift();
+        if (resolve) {
+            resolve({ value, done: false });
+        }
+        else {
+            this.queue.push(value);
+        }
+    }
+    close() {
+        while (this.resolvers.length) {
+            const resolve = this.resolvers.shift();
+            resolve?.({ value: undefined, done: true });
+        }
+        this.closed = true;
+    }
+    [Symbol.asyncIterator]() {
+        return {
+            next: () => {
+                if (this.queue.length > 0) {
+                    return Promise.resolve({ value: this.queue.shift(), done: false });
+                }
+                else if (this.closed) {
+                    return Promise.resolve({ value: undefined, done: true });
+                }
+                else {
+                    return new Promise((resolve) => this.resolvers.push(resolve));
+                }
+            },
+        };
+    }
+}
+exports.AsyncQueue = AsyncQueue;
@@ -0,0 +1,17 @@
+/**
+ * @internal
+ */
+export declare class AsyncQueue<T> implements AsyncIterable<T | undefined> {
+    queue: T[];
+    resolvers: Array<(options: {
+        value: T | undefined;
+        done: boolean;
+    }) => void>;
+    closed: boolean;
+    constructor();
+    push(value: T): void;
+    close(): void;
+    [Symbol.asyncIterator](): {
+        next: () => Promise<IteratorResult<T | undefined, T | undefined>>;
+    };
+}
@@ -0,0 +1,62 @@
+/**
+ * @internal
+ */
+export class AsyncQueue {
+    constructor() {
+        Object.defineProperty(this, "queue", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "resolvers", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "closed", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.queue = [];
+        this.resolvers = [];
+        this.closed = false;
+    }
+    push(value) {
+        if (this.closed) {
+            throw new Error("Pushing to a closed queue");
+        }
+        const resolve = this.resolvers.shift();
+        if (resolve) {
+            resolve({ value, done: false });
+        }
+        else {
+            this.queue.push(value);
+        }
+    }
+    close() {
+        while (this.resolvers.length) {
+            const resolve = this.resolvers.shift();
+            resolve?.({ value: undefined, done: true });
+        }
+        this.closed = true;
+    }
+    [Symbol.asyncIterator]() {
+        return {
+            next: () => {
+                if (this.queue.length > 0) {
+                    return Promise.resolve({ value: this.queue.shift(), done: false });
+                }
+                else if (this.closed) {
+                    return Promise.resolve({ value: undefined, done: true });
+                }
+                else {
+                    return new Promise((resolve) => this.resolvers.push(resolve));
+                }
+            },
+        };
+    }
+}
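For context, the AsyncQueue above is a simple push-based bridge between a producer and a `for await...of` consumer: `push` hands a value to a waiting consumer (or buffers it), and `close` ends pending and future iterations. A minimal usage sketch follows (hypothetical code, not part of the package; the relative import path is an assumption, and the class is marked @internal):

```ts
// Hypothetical usage sketch of the AsyncQueue shown above (not part of the package diff).
// The import path assumes a local copy of the compiled module.
import { AsyncQueue } from "./AsyncQueue.js";

async function main() {
  const queue = new AsyncQueue<string>();

  // Producer: push values asynchronously, then close the queue so consumers can finish.
  setTimeout(() => {
    queue.push("Hello, ");
    queue.push("world!");
    queue.close();
  }, 0);

  // Consumer: iteration ends once the queue is closed and drained.
  let text = "";
  for await (const value of queue) {
    if (value !== undefined) {
      text += value;
    }
  }
  console.log(text); // "Hello, world!"
}

main();
```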
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1,54 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseTextDeltaEventSource = exports.createTextDeltaEventSource = void 0;
+const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
+const zod_1 = require("zod");
+const AsyncQueue_js_1 = require("./AsyncQueue.cjs");
+const parseEventSourceReadableStream_js_1 = require("./parseEventSourceReadableStream.cjs");
+const textEncoder = new TextEncoder();
+const textDeltaEventDataSchema = zod_1.z.object({
+    textDelta: zod_1.z.string().optional(),
+    isFinished: zod_1.z.boolean(),
+});
+function enqueueData(controller, data) {
+    controller.enqueue(textEncoder.encode(`data: ${JSON.stringify(data)}\n\n`));
+}
+function createTextDeltaEventSource(textDeltas) {
+    return new ReadableStream({
+        async start(controller) {
+            for await (const textDelta of textDeltas) {
+                enqueueData(controller, { textDelta, isFinished: false });
+            }
+            enqueueData(controller, { isFinished: true });
+        },
+    });
+}
+exports.createTextDeltaEventSource = createTextDeltaEventSource;
+function parseTextDeltaEventSource(stream, options) {
+    const queue = new AsyncQueue_js_1.AsyncQueue();
+    // run async (no await on purpose):
+    (0, parseEventSourceReadableStream_js_1.parseEventSourceReadableStream)({
+        stream,
+        callback: (event) => {
+            if (event.type !== "event") {
+                return;
+            }
+            try {
+                const data = textDeltaEventDataSchema.parse(secure_json_parse_1.default.parse(event.data));
+                queue.push(data.textDelta);
+                if (data.isFinished) {
+                    queue.close();
+                }
+            }
+            catch (error) {
+                options?.errorHandler(error);
+                queue.close();
+            }
+        },
+    });
+    return queue;
+}
+exports.parseTextDeltaEventSource = parseTextDeltaEventSource;
@@ -0,0 +1,5 @@
+import { ErrorHandler } from "../../util/ErrorHandler.js";
+export declare function createTextDeltaEventSource(textDeltas: AsyncIterable<string>): ReadableStream<any>;
+export declare function parseTextDeltaEventSource(stream: ReadableStream<Uint8Array>, options?: {
+    errorHandler: ErrorHandler;
+}): AsyncIterable<string | undefined>;
@@ -0,0 +1,46 @@
+import SecureJSON from "secure-json-parse";
+import { z } from "zod";
+import { AsyncQueue } from "./AsyncQueue.js";
+import { parseEventSourceReadableStream } from "./parseEventSourceReadableStream.js";
+const textEncoder = new TextEncoder();
+const textDeltaEventDataSchema = z.object({
+    textDelta: z.string().optional(),
+    isFinished: z.boolean(),
+});
+function enqueueData(controller, data) {
+    controller.enqueue(textEncoder.encode(`data: ${JSON.stringify(data)}\n\n`));
+}
+export function createTextDeltaEventSource(textDeltas) {
+    return new ReadableStream({
+        async start(controller) {
+            for await (const textDelta of textDeltas) {
+                enqueueData(controller, { textDelta, isFinished: false });
+            }
+            enqueueData(controller, { isFinished: true });
+        },
+    });
+}
+export function parseTextDeltaEventSource(stream, options) {
+    const queue = new AsyncQueue();
+    // run async (no await on purpose):
+    parseEventSourceReadableStream({
+        stream,
+        callback: (event) => {
+            if (event.type !== "event") {
+                return;
+            }
+            try {
+                const data = textDeltaEventDataSchema.parse(SecureJSON.parse(event.data));
+                queue.push(data.textDelta);
+                if (data.isFinished) {
+                    queue.close();
+                }
+            }
+            catch (error) {
+                options?.errorHandler(error);
+                queue.close();
+            }
+        },
+    });
+    return queue;
+}
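The two helpers above are complementary: createTextDeltaEventSource serializes an async iterable of text deltas into a server-sent-events style ReadableStream (`data: {...}` chunks plus a final "finished" event), and parseTextDeltaEventSource decodes such a stream back into an async iterable of deltas. A round-trip sketch under those assumptions (hypothetical usage, not part of the package; the relative import path and error handler shape are assumptions, and a runtime with global ReadableStream/TextEncoder/TextDecoder such as Node 18+ is assumed):

```ts
// Hypothetical round-trip sketch for the helpers shown above (not part of the package diff).
import {
  createTextDeltaEventSource,
  parseTextDeltaEventSource,
} from "./TextDeltaEventSource.js";

async function* produceDeltas(): AsyncIterable<string> {
  yield "Hello, ";
  yield "world!";
}

async function roundTrip() {
  // "Server" side: encode the deltas plus a final { isFinished: true } event as SSE chunks.
  const stream = createTextDeltaEventSource(produceDeltas());

  // "Client" side: decode the SSE stream back into text deltas.
  const deltas = parseTextDeltaEventSource(stream, {
    errorHandler: (error) => console.error(error), // assumed ErrorHandler shape: (error) => void
  });

  let text = "";
  for await (const delta of deltas) {
    if (delta !== undefined) {
      text += delta; // the final "finished" event carries no textDelta and is skipped
    }
  }
  console.log(text); // "Hello, world!"
}

roundTrip();
```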
@@ -0,0 +1,22 @@
+import { ModelCallFinishedEventMetadata, ModelCallStartedEventMetadata } from "../ModelCallEvent.js";
+export type TextGenerationStartedEvent = {
+    type: "text-generation-started";
+    metadata: ModelCallStartedEventMetadata;
+    settings: unknown;
+    prompt: unknown;
+};
+export type TextGenerationFinishedEvent = {
+    type: "text-generation-finished";
+    metadata: ModelCallFinishedEventMetadata;
+    settings: unknown;
+    prompt: unknown;
+} & ({
+    status: "success";
+    response: unknown;
+    generatedText: string;
+} | {
+    status: "failure";
+    error: unknown;
+} | {
+    status: "abort";
+});
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1,42 @@
+import { PromptMapping } from "../../prompt/PromptMapping.js";
+import { PromptMappingTextGenerationModel } from "../../prompt/PromptMappingTextGenerationModel.js";
+import { FunctionOptions } from "../FunctionOptions.js";
+import { Model, ModelSettings } from "../Model.js";
+import { BasicTokenizer, FullTokenizer } from "../tokenize-text/Tokenizer.js";
+import { DeltaEvent } from "./DeltaEvent.js";
+export interface TextGenerationModelSettings extends ModelSettings {
+    trimOutput?: boolean;
+}
+export interface TextGenerationModel<PROMPT, RESPONSE, FULL_DELTA, SETTINGS extends TextGenerationModelSettings> extends Model<SETTINGS> {
+    readonly contextWindowSize: number | undefined;
+    readonly tokenizer: BasicTokenizer | FullTokenizer | undefined;
+    /**
+     * Optional. Implement if you have a tokenizer and want to count the number of tokens in a prompt.
+     */
+    readonly countPromptTokens: ((prompt: PROMPT) => PromiseLike<number>) | undefined;
+    generateTextResponse(prompt: PROMPT, options?: FunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
+    extractText(response: RESPONSE): string;
+    /**
+     * Optional. Implement for streaming support.
+     */
+    readonly generateDeltaStreamResponse: ((prompt: PROMPT, options: FunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>) | undefined;
+    /**
+     * Optional. Implement for streaming support.
+     */
+    readonly extractTextDelta: ((fullDelta: FULL_DELTA) => string | undefined) | undefined;
+    mapPrompt<INPUT_PROMPT>(promptMapping: PromptMapping<INPUT_PROMPT, PROMPT>): PromptMappingTextGenerationModel<INPUT_PROMPT, PROMPT, RESPONSE, FULL_DELTA, SETTINGS, this>;
+    /**
+     * Maximum number of tokens to generate.
+     */
+    readonly maxCompletionTokens: number | undefined;
+    /**
+     * Sets the maximum number of tokens to generate.
+     * Does nothing if the model does not support this setting.
+     */
+    withMaxCompletionTokens(maxCompletionTokens: number): this;
+    /**
+     * Sets the stop tokens to use. Stop tokens are not included in the generated text.
+     * Does nothing if the model does not support this setting.
+     */
+    withStopTokens(stopTokens: string[]): this;
+}
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1,22 @@
+import { ModelCallFinishedEventMetadata, ModelCallStartedEventMetadata } from "../ModelCallEvent.js";
+export type TextStreamingStartedEvent = {
+    type: "text-streaming-started";
+    metadata: ModelCallStartedEventMetadata;
+    settings: unknown;
+    prompt: unknown;
+};
+export type TextStreamingFinishedEvent = {
+    type: "text-streaming-finished";
+    metadata: ModelCallFinishedEventMetadata;
+    settings: unknown;
+    prompt: unknown;
+} & ({
+    status: "success";
+    response: unknown;
+    generatedText: string;
+} | {
+    status: "failure";
+    error: unknown;
+} | {
+    status: "abort";
+});
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractTextDeltas = void 0;
+async function* extractTextDeltas({ deltaIterable, extractDelta, onDone, onError, }) {
+    let accumulatedText = "";
+    let lastFullDelta;
+    for await (const event of deltaIterable) {
+        if (event?.type === "error") {
+            onError(event.error);
+            throw event.error;
+        }
+        if (event?.type === "delta") {
+            lastFullDelta = event.fullDelta;
+            const delta = extractDelta(lastFullDelta);
+            if (delta != null && delta.length > 0) {
+                accumulatedText += delta;
+                yield delta;
+            }
+        }
+    }
+    onDone(accumulatedText, lastFullDelta);
+}
+exports.extractTextDeltas = extractTextDeltas;
@@ -0,0 +1,7 @@
+import { DeltaEvent } from "./DeltaEvent.js";
+export declare function extractTextDeltas<FULL_DELTA>({ deltaIterable, extractDelta, onDone, onError, }: {
+    deltaIterable: AsyncIterable<DeltaEvent<FULL_DELTA>>;
+    extractDelta: (fullDelta: FULL_DELTA) => string | undefined;
+    onDone: (fullText: string, lastFullDelta: FULL_DELTA | undefined) => void;
+    onError: (error: unknown) => void;
+}): AsyncIterable<string>;
@@ -0,0 +1,19 @@
+export async function* extractTextDeltas({ deltaIterable, extractDelta, onDone, onError, }) {
+    let accumulatedText = "";
+    let lastFullDelta;
+    for await (const event of deltaIterable) {
+        if (event?.type === "error") {
+            onError(event.error);
+            throw event.error;
+        }
+        if (event?.type === "delta") {
+            lastFullDelta = event.fullDelta;
+            const delta = extractDelta(lastFullDelta);
+            if (delta != null && delta.length > 0) {
+                accumulatedText += delta;
+                yield delta;
+            }
+        }
+    }
+    onDone(accumulatedText, lastFullDelta);
+}
@@ -0,0 +1,67 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.generateText = void 0;
+const executeCall_js_1 = require("../executeCall.cjs");
+/**
+ * Generates a text using a prompt.
+ * The prompt format depends on the model.
+ * For example, OpenAI text models expect a string prompt, and OpenAI chat models expect an array of chat messages.
+ *
+ * @example
+ * const model = new OpenAITextGenerationModel(...);
+ *
+ * const { text } = await model.generateText(
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ */
+async function generateText(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+model, prompt, options) {
+    const result = await (0, executeCall_js_1.executeCall)({
+        model,
+        options,
+        generateResponse: (options) => model.generateTextResponse(prompt, options),
+        extractOutputValue: (result) => {
+            const shouldTrimOutput = model.settings.trimOutput ?? true;
+            return shouldTrimOutput
+                ? model.extractText(result).trim()
+                : model.extractText(result);
+        },
+        getStartEvent: (metadata, settings) => ({
+            type: "text-generation-started",
+            metadata,
+            settings,
+            prompt,
+        }),
+        getAbortEvent: (metadata, settings) => ({
+            type: "text-generation-finished",
+            status: "abort",
+            metadata,
+            settings,
+            prompt,
+        }),
+        getFailureEvent: (metadata, settings, error) => ({
+            type: "text-generation-finished",
+            status: "failure",
+            metadata,
+            settings,
+            prompt,
+            error,
+        }),
+        getSuccessEvent: (metadata, settings, response, output) => ({
+            type: "text-generation-finished",
+            status: "success",
+            metadata,
+            settings,
+            prompt,
+            response,
+            generatedText: output,
+        }),
+    });
+    return {
+        text: result.output,
+        response: result.response,
+        metadata: result.metadata,
+    };
+}
+exports.generateText = generateText;
@@ -0,0 +1,20 @@
+import { FunctionOptions } from "../FunctionOptions.js";
+import { CallMetadata } from "../executeCall.js";
+import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";
+/**
+ * Generates a text using a prompt.
+ * The prompt format depends on the model.
+ * For example, OpenAI text models expect a string prompt, and OpenAI chat models expect an array of chat messages.
+ *
+ * @example
+ * const model = new OpenAITextGenerationModel(...);
+ *
+ * const { text } = await model.generateText(
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ */
+export declare function generateText<PROMPT, RESPONSE, SETTINGS extends TextGenerationModelSettings>(model: TextGenerationModel<PROMPT, RESPONSE, any, SETTINGS>, prompt: PROMPT, options?: FunctionOptions<SETTINGS>): Promise<{
+    text: string;
+    response: RESPONSE;
+    metadata: CallMetadata<TextGenerationModel<PROMPT, RESPONSE, unknown, SETTINGS>>;
+}>;
@@ -0,0 +1,63 @@
+import { executeCall } from "../executeCall.js";
+/**
+ * Generates a text using a prompt.
+ * The prompt format depends on the model.
+ * For example, OpenAI text models expect a string prompt, and OpenAI chat models expect an array of chat messages.
+ *
+ * @example
+ * const model = new OpenAITextGenerationModel(...);
+ *
+ * const { text } = await model.generateText(
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ */
+export async function generateText(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+model, prompt, options) {
+    const result = await executeCall({
+        model,
+        options,
+        generateResponse: (options) => model.generateTextResponse(prompt, options),
+        extractOutputValue: (result) => {
+            const shouldTrimOutput = model.settings.trimOutput ?? true;
+            return shouldTrimOutput
+                ? model.extractText(result).trim()
+                : model.extractText(result);
+        },
+        getStartEvent: (metadata, settings) => ({
+            type: "text-generation-started",
+            metadata,
+            settings,
+            prompt,
+        }),
+        getAbortEvent: (metadata, settings) => ({
+            type: "text-generation-finished",
+            status: "abort",
+            metadata,
+            settings,
+            prompt,
+        }),
+        getFailureEvent: (metadata, settings, error) => ({
+            type: "text-generation-finished",
+            status: "failure",
+            metadata,
+            settings,
+            prompt,
+            error,
+        }),
+        getSuccessEvent: (metadata, settings, response, output) => ({
+            type: "text-generation-finished",
+            status: "success",
+            metadata,
+            settings,
+            prompt,
+            response,
+            generatedText: output,
+        }),
+    });
+    return {
+        text: result.output,
+        response: result.response,
+        metadata: result.metadata,
+    };
+}
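Going by the generateText declaration above, the function takes any TextGenerationModel plus a matching prompt and resolves to { text, response, metadata }. A hedged call-site sketch (the OpenAI model class, its settings, and the root "modelfusion" import are assumptions taken from the file listing above, not verified against this version's exports):

```ts
// Hypothetical call-site sketch for generateText (not part of the package diff).
// OpenAITextGenerationModel and its settings are assumptions based on the file listing.
import { generateText, OpenAITextGenerationModel } from "modelfusion";

async function main() {
  const model = new OpenAITextGenerationModel({
    model: "text-davinci-003", // assumed setting name and model id
  });

  // generateText(model, prompt, options?) -> { text, response, metadata }
  const { text } = await generateText(
    model,
    "Write a short story about a robot learning to love:\n\n"
  );

  console.log(text);
}

main();
```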
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseEventSourceReadableStream = void 0;
+const eventsource_parser_1 = require("eventsource-parser");
+async function* convertReadableStreamToAsyncIterable(reader) {
+    while (true) {
+        const result = await reader.read();
+        if (result.done) {
+            break;
+        }
+        yield result.value;
+    }
+}
+/**
+ * @internal
+ */
+async function parseEventSourceReadableStream({ stream, callback, }) {
+    try {
+        const parser = (0, eventsource_parser_1.createParser)(callback);
+        const decoder = new TextDecoder();
+        const iterable = convertReadableStreamToAsyncIterable(stream.getReader());
+        for await (const value of iterable) {
+            parser.feed(decoder.decode(value));
+        }
+    }
+    catch (error) {
+        console.error(error); // TODO introduce error handler param
+    }
+}
+exports.parseEventSourceReadableStream = parseEventSourceReadableStream;
@@ -0,0 +1,8 @@
+import { EventSourceParseCallback } from "eventsource-parser";
+/**
+ * @internal
+ */
+export declare function parseEventSourceReadableStream({ stream, callback, }: {
+    stream: ReadableStream<Uint8Array>;
+    callback: EventSourceParseCallback;
+}): Promise<void>;
@@ -0,0 +1,26 @@
+import { createParser } from "eventsource-parser";
+async function* convertReadableStreamToAsyncIterable(reader) {
+    while (true) {
+        const result = await reader.read();
+        if (result.done) {
+            break;
+        }
+        yield result.value;
+    }
+}
+/**
+ * @internal
+ */
+export async function parseEventSourceReadableStream({ stream, callback, }) {
+    try {
+        const parser = createParser(callback);
+        const decoder = new TextDecoder();
+        const iterable = convertReadableStreamToAsyncIterable(stream.getReader());
+        for await (const value of iterable) {
+            parser.feed(decoder.decode(value));
+        }
+    }
+    catch (error) {
+        console.error(error); // TODO introduce error handler param
+    }
+}
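parseEventSourceReadableStream above is a thin wrapper around eventsource-parser's createParser: it decodes a ReadableStream of bytes into text chunks, feeds them to the parser, and the parser invokes the callback once per parsed server-sent event. A sketch of feeding a streaming HTTP response through it (hypothetical usage of an @internal helper; the URL and the relative import path are placeholders):

```ts
// Hypothetical sketch for the @internal helper shown above (not part of the package diff).
import { parseEventSourceReadableStream } from "./parseEventSourceReadableStream.js";

async function logServerSentEvents() {
  // Placeholder endpoint that is assumed to respond with a text/event-stream body.
  const response = await fetch("https://example.com/stream");

  if (response.body == null) {
    throw new Error("Response has no body stream");
  }

  await parseEventSourceReadableStream({
    stream: response.body,
    // eventsource-parser invokes this per parsed item; only "event" items carry data.
    callback: (event) => {
      if (event.type === "event") {
        console.log(event.data);
      }
    },
  });
}

logServerSentEvents();
```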