modelfusion 0.0.44
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- package/LICENSE +21 -0
- package/README.md +429 -0
- package/composed-function/index.cjs +22 -0
- package/composed-function/index.d.ts +6 -0
- package/composed-function/index.js +6 -0
- package/composed-function/summarize/SummarizationFunction.cjs +2 -0
- package/composed-function/summarize/SummarizationFunction.d.ts +4 -0
- package/composed-function/summarize/SummarizationFunction.js +1 -0
- package/composed-function/summarize/summarizeRecursively.cjs +19 -0
- package/composed-function/summarize/summarizeRecursively.d.ts +11 -0
- package/composed-function/summarize/summarizeRecursively.js +15 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +29 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +24 -0
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +25 -0
- package/composed-function/use-tool/NoSuchToolError.cjs +17 -0
- package/composed-function/use-tool/NoSuchToolError.d.ts +4 -0
- package/composed-function/use-tool/NoSuchToolError.js +13 -0
- package/composed-function/use-tool/Tool.cjs +43 -0
- package/composed-function/use-tool/Tool.d.ts +15 -0
- package/composed-function/use-tool/Tool.js +39 -0
- package/composed-function/use-tool/useTool.cjs +59 -0
- package/composed-function/use-tool/useTool.d.ts +36 -0
- package/composed-function/use-tool/useTool.js +54 -0
- package/cost/Cost.cjs +38 -0
- package/cost/Cost.d.ts +16 -0
- package/cost/Cost.js +34 -0
- package/cost/CostCalculator.cjs +2 -0
- package/cost/CostCalculator.d.ts +8 -0
- package/cost/CostCalculator.js +1 -0
- package/cost/calculateCost.cjs +28 -0
- package/cost/calculateCost.d.ts +7 -0
- package/cost/calculateCost.js +24 -0
- package/cost/index.cjs +19 -0
- package/cost/index.d.ts +3 -0
- package/cost/index.js +3 -0
- package/index.cjs +25 -0
- package/index.d.ts +9 -0
- package/index.js +9 -0
- package/model-function/AbstractModel.cjs +22 -0
- package/model-function/AbstractModel.d.ts +12 -0
- package/model-function/AbstractModel.js +18 -0
- package/model-function/FunctionOptions.cjs +2 -0
- package/model-function/FunctionOptions.d.ts +6 -0
- package/model-function/FunctionOptions.js +1 -0
- package/model-function/Model.cjs +2 -0
- package/model-function/Model.d.ts +23 -0
- package/model-function/Model.js +1 -0
- package/model-function/ModelCallEvent.cjs +2 -0
- package/model-function/ModelCallEvent.d.ts +18 -0
- package/model-function/ModelCallEvent.js +1 -0
- package/model-function/ModelCallEventSource.cjs +42 -0
- package/model-function/ModelCallEventSource.d.ts +13 -0
- package/model-function/ModelCallEventSource.js +38 -0
- package/model-function/ModelCallObserver.cjs +2 -0
- package/model-function/ModelCallObserver.d.ts +5 -0
- package/model-function/ModelCallObserver.js +1 -0
- package/model-function/ModelInformation.cjs +2 -0
- package/model-function/ModelInformation.d.ts +4 -0
- package/model-function/ModelInformation.js +1 -0
- package/model-function/SuccessfulModelCall.cjs +22 -0
- package/model-function/SuccessfulModelCall.d.ts +9 -0
- package/model-function/SuccessfulModelCall.js +18 -0
- package/model-function/embed-text/TextEmbeddingEvent.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingEvent.d.ts +23 -0
- package/model-function/embed-text/TextEmbeddingEvent.js +1 -0
- package/model-function/embed-text/TextEmbeddingModel.cjs +2 -0
- package/model-function/embed-text/TextEmbeddingModel.d.ts +18 -0
- package/model-function/embed-text/TextEmbeddingModel.js +1 -0
- package/model-function/embed-text/embedText.cjs +90 -0
- package/model-function/embed-text/embedText.d.ts +33 -0
- package/model-function/embed-text/embedText.js +85 -0
- package/model-function/executeCall.cjs +60 -0
- package/model-function/executeCall.d.ts +27 -0
- package/model-function/executeCall.js +56 -0
- package/model-function/generate-image/ImageGenerationEvent.cjs +2 -0
- package/model-function/generate-image/ImageGenerationEvent.d.ts +22 -0
- package/model-function/generate-image/ImageGenerationEvent.js +1 -0
- package/model-function/generate-image/ImageGenerationModel.cjs +2 -0
- package/model-function/generate-image/ImageGenerationModel.d.ts +8 -0
- package/model-function/generate-image/ImageGenerationModel.js +1 -0
- package/model-function/generate-image/generateImage.cjs +63 -0
- package/model-function/generate-image/generateImage.d.ts +23 -0
- package/model-function/generate-image/generateImage.js +59 -0
- package/model-function/generate-json/GenerateJsonModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonModel.d.ts +10 -0
- package/model-function/generate-json/GenerateJsonModel.js +1 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.cjs +2 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +18 -0
- package/model-function/generate-json/GenerateJsonOrTextModel.js +1 -0
- package/model-function/generate-json/JsonGenerationEvent.cjs +2 -0
- package/model-function/generate-json/JsonGenerationEvent.d.ts +22 -0
- package/model-function/generate-json/JsonGenerationEvent.js +1 -0
- package/model-function/generate-json/NoSuchSchemaError.cjs +17 -0
- package/model-function/generate-json/NoSuchSchemaError.d.ts +4 -0
- package/model-function/generate-json/NoSuchSchemaError.js +13 -0
- package/model-function/generate-json/SchemaDefinition.cjs +2 -0
- package/model-function/generate-json/SchemaDefinition.d.ts +6 -0
- package/model-function/generate-json/SchemaDefinition.js +1 -0
- package/model-function/generate-json/SchemaValidationError.cjs +36 -0
- package/model-function/generate-json/SchemaValidationError.d.ts +11 -0
- package/model-function/generate-json/SchemaValidationError.js +32 -0
- package/model-function/generate-json/generateJson.cjs +61 -0
- package/model-function/generate-json/generateJson.d.ts +9 -0
- package/model-function/generate-json/generateJson.js +57 -0
- package/model-function/generate-json/generateJsonOrText.cjs +74 -0
- package/model-function/generate-json/generateJsonOrText.d.ts +25 -0
- package/model-function/generate-json/generateJsonOrText.js +70 -0
- package/model-function/generate-text/AsyncQueue.cjs +66 -0
- package/model-function/generate-text/AsyncQueue.d.ts +17 -0
- package/model-function/generate-text/AsyncQueue.js +62 -0
- package/model-function/generate-text/DeltaEvent.cjs +2 -0
- package/model-function/generate-text/DeltaEvent.d.ts +7 -0
- package/model-function/generate-text/DeltaEvent.js +1 -0
- package/model-function/generate-text/TextDeltaEventSource.cjs +54 -0
- package/model-function/generate-text/TextDeltaEventSource.d.ts +5 -0
- package/model-function/generate-text/TextDeltaEventSource.js +46 -0
- package/model-function/generate-text/TextGenerationEvent.cjs +2 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +22 -0
- package/model-function/generate-text/TextGenerationEvent.js +1 -0
- package/model-function/generate-text/TextGenerationModel.cjs +2 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +42 -0
- package/model-function/generate-text/TextGenerationModel.js +1 -0
- package/model-function/generate-text/TextStreamingEvent.cjs +2 -0
- package/model-function/generate-text/TextStreamingEvent.d.ts +22 -0
- package/model-function/generate-text/TextStreamingEvent.js +1 -0
- package/model-function/generate-text/extractTextDeltas.cjs +23 -0
- package/model-function/generate-text/extractTextDeltas.d.ts +7 -0
- package/model-function/generate-text/extractTextDeltas.js +19 -0
- package/model-function/generate-text/generateText.cjs +67 -0
- package/model-function/generate-text/generateText.d.ts +20 -0
- package/model-function/generate-text/generateText.js +63 -0
- package/model-function/generate-text/parseEventSourceReadableStream.cjs +30 -0
- package/model-function/generate-text/parseEventSourceReadableStream.d.ts +8 -0
- package/model-function/generate-text/parseEventSourceReadableStream.js +26 -0
- package/model-function/generate-text/streamText.cjs +115 -0
- package/model-function/generate-text/streamText.d.ts +11 -0
- package/model-function/generate-text/streamText.js +111 -0
- package/model-function/index.cjs +47 -0
- package/model-function/index.d.ts +31 -0
- package/model-function/index.js +31 -0
- package/model-function/tokenize-text/Tokenizer.cjs +2 -0
- package/model-function/tokenize-text/Tokenizer.d.ts +19 -0
- package/model-function/tokenize-text/Tokenizer.js +1 -0
- package/model-function/tokenize-text/countTokens.cjs +10 -0
- package/model-function/tokenize-text/countTokens.d.ts +5 -0
- package/model-function/tokenize-text/countTokens.js +6 -0
- package/model-function/transcribe-audio/TranscriptionEvent.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionEvent.d.ts +22 -0
- package/model-function/transcribe-audio/TranscriptionEvent.js +1 -0
- package/model-function/transcribe-audio/TranscriptionModel.cjs +2 -0
- package/model-function/transcribe-audio/TranscriptionModel.d.ts +8 -0
- package/model-function/transcribe-audio/TranscriptionModel.js +1 -0
- package/model-function/transcribe-audio/transcribe.cjs +62 -0
- package/model-function/transcribe-audio/transcribe.d.ts +22 -0
- package/model-function/transcribe-audio/transcribe.js +58 -0
- package/model-provider/automatic1111/Automatic1111Error.cjs +39 -0
- package/model-provider/automatic1111/Automatic1111Error.d.ts +31 -0
- package/model-provider/automatic1111/Automatic1111Error.js +31 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +76 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +54 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +72 -0
- package/model-provider/automatic1111/index.cjs +20 -0
- package/model-provider/automatic1111/index.d.ts +2 -0
- package/model-provider/automatic1111/index.js +2 -0
- package/model-provider/cohere/CohereError.cjs +36 -0
- package/model-provider/cohere/CohereError.d.ts +22 -0
- package/model-provider/cohere/CohereError.js +28 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +172 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +119 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.js +165 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +283 -0
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +203 -0
- package/model-provider/cohere/CohereTextGenerationModel.js +276 -0
- package/model-provider/cohere/CohereTokenizer.cjs +136 -0
- package/model-provider/cohere/CohereTokenizer.d.ts +118 -0
- package/model-provider/cohere/CohereTokenizer.js +129 -0
- package/model-provider/cohere/index.cjs +22 -0
- package/model-provider/cohere/index.d.ts +4 -0
- package/model-provider/cohere/index.js +4 -0
- package/model-provider/huggingface/HuggingFaceError.cjs +52 -0
- package/model-provider/huggingface/HuggingFaceError.d.ts +22 -0
- package/model-provider/huggingface/HuggingFaceError.js +44 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +174 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +75 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +167 -0
- package/model-provider/huggingface/index.cjs +20 -0
- package/model-provider/huggingface/index.d.ts +2 -0
- package/model-provider/huggingface/index.js +2 -0
- package/model-provider/index.cjs +22 -0
- package/model-provider/index.d.ts +6 -0
- package/model-provider/index.js +6 -0
- package/model-provider/llamacpp/LlamaCppError.cjs +52 -0
- package/model-provider/llamacpp/LlamaCppError.d.ts +22 -0
- package/model-provider/llamacpp/LlamaCppError.js +44 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +96 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +40 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +89 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +245 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +399 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +238 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.cjs +64 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +38 -0
- package/model-provider/llamacpp/LlamaCppTokenizer.js +57 -0
- package/model-provider/llamacpp/index.cjs +22 -0
- package/model-provider/llamacpp/index.d.ts +4 -0
- package/model-provider/llamacpp/index.js +4 -0
- package/model-provider/openai/OpenAICostCalculator.cjs +71 -0
- package/model-provider/openai/OpenAICostCalculator.d.ts +6 -0
- package/model-provider/openai/OpenAICostCalculator.js +67 -0
- package/model-provider/openai/OpenAIError.cjs +50 -0
- package/model-provider/openai/OpenAIError.d.ts +47 -0
- package/model-provider/openai/OpenAIError.js +42 -0
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +124 -0
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +113 -0
- package/model-provider/openai/OpenAIImageGenerationModel.js +119 -0
- package/model-provider/openai/OpenAIModelSettings.cjs +2 -0
- package/model-provider/openai/OpenAIModelSettings.d.ts +8 -0
- package/model-provider/openai/OpenAIModelSettings.js +1 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +171 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +122 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.js +162 -0
- package/model-provider/openai/OpenAITextGenerationModel.cjs +326 -0
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +254 -0
- package/model-provider/openai/OpenAITextGenerationModel.js +317 -0
- package/model-provider/openai/OpenAITranscriptionModel.cjs +195 -0
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +196 -0
- package/model-provider/openai/OpenAITranscriptionModel.js +187 -0
- package/model-provider/openai/TikTokenTokenizer.cjs +86 -0
- package/model-provider/openai/TikTokenTokenizer.d.ts +35 -0
- package/model-provider/openai/TikTokenTokenizer.js +82 -0
- package/model-provider/openai/chat/OpenAIChatMessage.cjs +24 -0
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +26 -0
- package/model-provider/openai/chat/OpenAIChatMessage.js +21 -0
- package/model-provider/openai/chat/OpenAIChatModel.cjs +288 -0
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +344 -0
- package/model-provider/openai/chat/OpenAIChatModel.js +279 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.cjs +143 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +108 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.js +135 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +112 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +19 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +105 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.cjs +28 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.d.ts +20 -0
- package/model-provider/openai/chat/countOpenAIChatMessageTokens.js +23 -0
- package/model-provider/openai/index.cjs +31 -0
- package/model-provider/openai/index.d.ts +13 -0
- package/model-provider/openai/index.js +12 -0
- package/model-provider/stability/StabilityError.cjs +36 -0
- package/model-provider/stability/StabilityError.d.ts +22 -0
- package/model-provider/stability/StabilityError.js +28 -0
- package/model-provider/stability/StabilityImageGenerationModel.cjs +133 -0
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +95 -0
- package/model-provider/stability/StabilityImageGenerationModel.js +129 -0
- package/model-provider/stability/index.cjs +20 -0
- package/model-provider/stability/index.d.ts +2 -0
- package/model-provider/stability/index.js +2 -0
- package/package.json +87 -0
- package/prompt/InstructionPrompt.cjs +2 -0
- package/prompt/InstructionPrompt.d.ts +7 -0
- package/prompt/InstructionPrompt.js +1 -0
- package/prompt/Llama2PromptMapping.cjs +56 -0
- package/prompt/Llama2PromptMapping.d.ts +10 -0
- package/prompt/Llama2PromptMapping.js +51 -0
- package/prompt/OpenAIChatPromptMapping.cjs +62 -0
- package/prompt/OpenAIChatPromptMapping.d.ts +6 -0
- package/prompt/OpenAIChatPromptMapping.js +57 -0
- package/prompt/PromptMapping.cjs +2 -0
- package/prompt/PromptMapping.d.ts +7 -0
- package/prompt/PromptMapping.js +1 -0
- package/prompt/PromptMappingTextGenerationModel.cjs +88 -0
- package/prompt/PromptMappingTextGenerationModel.d.ts +26 -0
- package/prompt/PromptMappingTextGenerationModel.js +84 -0
- package/prompt/TextPromptMapping.cjs +50 -0
- package/prompt/TextPromptMapping.d.ts +14 -0
- package/prompt/TextPromptMapping.js +45 -0
- package/prompt/chat/ChatPrompt.cjs +2 -0
- package/prompt/chat/ChatPrompt.d.ts +33 -0
- package/prompt/chat/ChatPrompt.js +1 -0
- package/prompt/chat/trimChatPrompt.cjs +50 -0
- package/prompt/chat/trimChatPrompt.d.ts +19 -0
- package/prompt/chat/trimChatPrompt.js +46 -0
- package/prompt/chat/validateChatPrompt.cjs +36 -0
- package/prompt/chat/validateChatPrompt.d.ts +8 -0
- package/prompt/chat/validateChatPrompt.js +31 -0
- package/prompt/index.cjs +25 -0
- package/prompt/index.d.ts +9 -0
- package/prompt/index.js +9 -0
- package/run/ConsoleLogger.cjs +12 -0
- package/run/ConsoleLogger.d.ts +6 -0
- package/run/ConsoleLogger.js +8 -0
- package/run/DefaultRun.cjs +78 -0
- package/run/DefaultRun.d.ts +24 -0
- package/run/DefaultRun.js +74 -0
- package/run/IdMetadata.cjs +2 -0
- package/run/IdMetadata.d.ts +7 -0
- package/run/IdMetadata.js +1 -0
- package/run/Run.cjs +2 -0
- package/run/Run.d.ts +27 -0
- package/run/Run.js +1 -0
- package/run/RunFunction.cjs +2 -0
- package/run/RunFunction.d.ts +13 -0
- package/run/RunFunction.js +1 -0
- package/run/Vector.cjs +2 -0
- package/run/Vector.d.ts +5 -0
- package/run/Vector.js +1 -0
- package/run/index.cjs +22 -0
- package/run/index.d.ts +6 -0
- package/run/index.js +6 -0
- package/text-chunk/TextChunk.cjs +2 -0
- package/text-chunk/TextChunk.d.ts +3 -0
- package/text-chunk/TextChunk.js +1 -0
- package/text-chunk/index.cjs +22 -0
- package/text-chunk/index.d.ts +6 -0
- package/text-chunk/index.js +6 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.cjs +2 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +8 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.js +1 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.cjs +10 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +6 -0
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.js +6 -0
- package/text-chunk/split/SplitFunction.cjs +2 -0
- package/text-chunk/split/SplitFunction.d.ts +4 -0
- package/text-chunk/split/SplitFunction.js +1 -0
- package/text-chunk/split/splitOnSeparator.cjs +12 -0
- package/text-chunk/split/splitOnSeparator.d.ts +8 -0
- package/text-chunk/split/splitOnSeparator.js +7 -0
- package/text-chunk/split/splitRecursively.cjs +41 -0
- package/text-chunk/split/splitRecursively.d.ts +22 -0
- package/text-chunk/split/splitRecursively.js +33 -0
- package/util/DurationMeasurement.cjs +42 -0
- package/util/DurationMeasurement.d.ts +5 -0
- package/util/DurationMeasurement.js +38 -0
- package/util/ErrorHandler.cjs +2 -0
- package/util/ErrorHandler.d.ts +1 -0
- package/util/ErrorHandler.js +1 -0
- package/util/SafeResult.cjs +2 -0
- package/util/SafeResult.d.ts +8 -0
- package/util/SafeResult.js +1 -0
- package/util/api/AbortError.cjs +9 -0
- package/util/api/AbortError.d.ts +3 -0
- package/util/api/AbortError.js +5 -0
- package/util/api/ApiCallError.cjs +45 -0
- package/util/api/ApiCallError.d.ts +15 -0
- package/util/api/ApiCallError.js +41 -0
- package/util/api/RetryError.cjs +24 -0
- package/util/api/RetryError.d.ts +10 -0
- package/util/api/RetryError.js +20 -0
- package/util/api/RetryFunction.cjs +2 -0
- package/util/api/RetryFunction.d.ts +1 -0
- package/util/api/RetryFunction.js +1 -0
- package/util/api/ThrottleFunction.cjs +2 -0
- package/util/api/ThrottleFunction.d.ts +1 -0
- package/util/api/ThrottleFunction.js +1 -0
- package/util/api/callWithRetryAndThrottle.cjs +7 -0
- package/util/api/callWithRetryAndThrottle.d.ts +7 -0
- package/util/api/callWithRetryAndThrottle.js +3 -0
- package/util/api/postToApi.cjs +103 -0
- package/util/api/postToApi.d.ts +29 -0
- package/util/api/postToApi.js +96 -0
- package/util/api/retryNever.cjs +8 -0
- package/util/api/retryNever.d.ts +4 -0
- package/util/api/retryNever.js +4 -0
- package/util/api/retryWithExponentialBackoff.cjs +48 -0
- package/util/api/retryWithExponentialBackoff.d.ts +10 -0
- package/util/api/retryWithExponentialBackoff.js +44 -0
- package/util/api/throttleMaxConcurrency.cjs +65 -0
- package/util/api/throttleMaxConcurrency.d.ts +7 -0
- package/util/api/throttleMaxConcurrency.js +61 -0
- package/util/api/throttleUnlimitedConcurrency.cjs +8 -0
- package/util/api/throttleUnlimitedConcurrency.d.ts +5 -0
- package/util/api/throttleUnlimitedConcurrency.js +4 -0
- package/util/cosineSimilarity.cjs +26 -0
- package/util/cosineSimilarity.d.ts +11 -0
- package/util/cosineSimilarity.js +22 -0
- package/util/index.cjs +26 -0
- package/util/index.d.ts +10 -0
- package/util/index.js +10 -0
- package/util/never.cjs +6 -0
- package/util/never.d.ts +1 -0
- package/util/never.js +2 -0
- package/util/runSafe.cjs +15 -0
- package/util/runSafe.d.ts +2 -0
- package/util/runSafe.js +11 -0
- package/vector-index/VectorIndex.cjs +2 -0
- package/vector-index/VectorIndex.d.ts +18 -0
- package/vector-index/VectorIndex.js +1 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.cjs +57 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.d.ts +20 -0
- package/vector-index/VectorIndexSimilarTextChunkRetriever.js +53 -0
- package/vector-index/VectorIndexTextChunkStore.cjs +77 -0
- package/vector-index/VectorIndexTextChunkStore.d.ts +35 -0
- package/vector-index/VectorIndexTextChunkStore.js +73 -0
- package/vector-index/index.cjs +22 -0
- package/vector-index/index.d.ts +6 -0
- package/vector-index/index.js +6 -0
- package/vector-index/memory/MemoryVectorIndex.cjs +63 -0
- package/vector-index/memory/MemoryVectorIndex.d.ts +31 -0
- package/vector-index/memory/MemoryVectorIndex.js +56 -0
- package/vector-index/pinecone/PineconeVectorIndex.cjs +66 -0
- package/vector-index/pinecone/PineconeVectorIndex.d.ts +29 -0
- package/vector-index/pinecone/PineconeVectorIndex.js +62 -0
- package/vector-index/upsertTextChunks.cjs +15 -0
- package/vector-index/upsertTextChunks.d.ts +11 -0
- package/vector-index/upsertTextChunks.js +11 -0
@@ -0,0 +1,95 @@
+import { z } from "zod";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
+import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
+import { RetryFunction } from "../../util/api/RetryFunction.js";
+import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
+/**
+ * Create an image generation model that calls the Stability AI image generation API.
+ *
+ * @see https://api.stability.ai/docs#tag/v1generation/operation/textToImage
+ *
+ * @example
+ * const { image } = await generateImage(
+ *   new StabilityImageGenerationModel({
+ *     model: "stable-diffusion-512-v2-1",
+ *     cfgScale: 7,
+ *     clipGuidancePreset: "FAST_BLUE",
+ *     height: 512,
+ *     width: 512,
+ *     samples: 1,
+ *     steps: 30,
+ *   })
+ *   [
+ *     { text: "the wicked witch of the west" },
+ *     { text: "style of early 19th century painting", weight: 0.5 },
+ *   ]
+ * );
+ */
+export declare class StabilityImageGenerationModel extends AbstractModel<StabilityImageGenerationModelSettings> implements ImageGenerationModel<StabilityImageGenerationPrompt, StabilityImageGenerationResponse, StabilityImageGenerationModelSettings> {
+    constructor(settings: StabilityImageGenerationModelSettings);
+    readonly provider: "stability";
+    get modelName(): string;
+    private get apiKey();
+    callAPI(input: StabilityImageGenerationPrompt, options?: FunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
+    generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: FunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
+        artifacts: {
+            seed: number;
+            base64: string;
+            finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
+        }[];
+    }>;
+    extractBase64Image(response: StabilityImageGenerationResponse): string;
+    withSettings(additionalSettings: StabilityImageGenerationModelSettings): this;
+}
+export interface StabilityImageGenerationModelSettings extends ImageGenerationModelSettings {
+    model: string;
+    baseUrl?: string;
+    apiKey?: string;
+    retry?: RetryFunction;
+    throttle?: ThrottleFunction;
+    height?: number;
+    width?: number;
+    cfgScale?: number;
+    clipGuidancePreset?: string;
+    sampler?: StabilityImageGenerationSampler;
+    samples?: number;
+    seed?: number;
+    steps?: number;
+    stylePreset?: StabilityImageGenerationStylePreset;
+}
+declare const stabilityImageGenerationResponseSchema: z.ZodObject<{
+    artifacts: z.ZodArray<z.ZodObject<{
+        base64: z.ZodString;
+        seed: z.ZodNumber;
+        finishReason: z.ZodEnum<["SUCCESS", "ERROR", "CONTENT_FILTERED"]>;
+    }, "strip", z.ZodTypeAny, {
+        seed: number;
+        base64: string;
+        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
+    }, {
+        seed: number;
+        base64: string;
+        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
+    }>, "many">;
+}, "strip", z.ZodTypeAny, {
+    artifacts: {
+        seed: number;
+        base64: string;
+        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
+    }[];
+}, {
+    artifacts: {
+        seed: number;
+        base64: string;
+        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
+    }[];
+}>;
+export type StabilityImageGenerationResponse = z.infer<typeof stabilityImageGenerationResponseSchema>;
+export type StabilityImageGenerationStylePreset = "enhance" | "anime" | "photographic" | "digital-art" | "comic-book" | "fantasy-art" | "line-art" | "analog-film" | "neon-punk" | "isometric" | "low-poly" | "origami" | "modeling-compound" | "cinematic" | "3d-model" | "pixel-art" | "tile-texture";
+export type StabilityImageGenerationSampler = "DDIM" | "DDPM" | "K_DPMPP_2M" | "K_DPMPP_2S_ANCESTRAL" | "K_DPM_2" | "K_DPM_2_ANCESTRAL" | "K_EULER" | "K_EULER_ANCESTRAL" | "K_HEUN" | "K_LMS";
+export type StabilityImageGenerationPrompt = Array<{
+    text: string;
+    weight?: number;
+}>;
+export {};
@@ -0,0 +1,129 @@
+import { z } from "zod";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
+import { callWithRetryAndThrottle } from "../../util/api/callWithRetryAndThrottle.js";
+import { createJsonResponseHandler, postJsonToApi, } from "../../util/api/postToApi.js";
+import { failedStabilityCallResponseHandler } from "./StabilityError.js";
+/**
+ * Create an image generation model that calls the Stability AI image generation API.
+ *
+ * @see https://api.stability.ai/docs#tag/v1generation/operation/textToImage
+ *
+ * @example
+ * const { image } = await generateImage(
+ *   new StabilityImageGenerationModel({
+ *     model: "stable-diffusion-512-v2-1",
+ *     cfgScale: 7,
+ *     clipGuidancePreset: "FAST_BLUE",
+ *     height: 512,
+ *     width: 512,
+ *     samples: 1,
+ *     steps: 30,
+ *   })
+ *   [
+ *     { text: "the wicked witch of the west" },
+ *     { text: "style of early 19th century painting", weight: 0.5 },
+ *   ]
+ * );
+ */
+export class StabilityImageGenerationModel extends AbstractModel {
+    constructor(settings) {
+        super({ settings });
+        Object.defineProperty(this, "provider", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "stability"
+        });
+    }
+    get modelName() {
+        return this.settings.model;
+    }
+    get apiKey() {
+        const apiKey = this.settings.apiKey ?? process.env.STABILITY_API_KEY;
+        if (apiKey == null) {
+            throw new Error("No API key provided. Either pass an API key to the constructor or set the STABILITY_API_KEY environment variable.");
+        }
+        return apiKey;
+    }
+    async callAPI(input, options) {
+        const run = options?.run;
+        const settings = options?.settings;
+        const callSettings = Object.assign({
+            apiKey: this.apiKey,
+        }, this.settings, settings, {
+            abortSignal: run?.abortSignal,
+            engineId: this.settings.model,
+            textPrompts: input,
+        });
+        return callWithRetryAndThrottle({
+            retry: this.settings.retry,
+            throttle: this.settings.throttle,
+            call: async () => callStabilityImageGenerationAPI(callSettings),
+        });
+    }
+    generateImageResponse(prompt, options) {
+        return this.callAPI(prompt, options);
+    }
+    extractBase64Image(response) {
+        return response.artifacts[0].base64;
+    }
+    withSettings(additionalSettings) {
+        return new StabilityImageGenerationModel(Object.assign({}, this.settings, additionalSettings));
+    }
+}
+const stabilityImageGenerationResponseSchema = z.object({
+    artifacts: z.array(z.object({
+        base64: z.string(),
+        seed: z.number(),
+        finishReason: z.enum(["SUCCESS", "ERROR", "CONTENT_FILTERED"]),
+    })),
+});
+/**
+ * Call the Stability AI API for image generation.
+ *
+ * @see https://api.stability.ai/docs#tag/v1generation/operation/textToImage
+ *
+ * @example
+ * const imageResponse = await callStabilityImageGenerationAPI({
+ *   apiKey: STABILITY_API_KEY,
+ *   engineId: "stable-diffusion-512-v2-1",
+ *   textPrompts: [
+ *     { text: "the wicked witch of the west" },
+ *     { text: "style of early 19th century painting", weight: 0.5 },
+ *   ],
+ *   cfgScale: 7,
+ *   clipGuidancePreset: "FAST_BLUE",
+ *   height: 512,
+ *   width: 512,
+ *   samples: 1,
+ *   steps: 30,
+ * });
+ *
+ * imageResponse.artifacts.forEach((image, index) => {
+ *   fs.writeFileSync(
+ *     `./stability-image-example-${index}.png`,
+ *     Buffer.from(image.base64, "base64")
+ *   );
+ * });
+ */
+async function callStabilityImageGenerationAPI({ baseUrl = "https://api.stability.ai/v1", abortSignal, apiKey, engineId, height, width, textPrompts, cfgScale, clipGuidancePreset, sampler, samples, seed, steps, stylePreset, }) {
+    return postJsonToApi({
+        url: `${baseUrl}/generation/${engineId}/text-to-image`,
+        apiKey,
+        body: {
+            height,
+            width,
+            text_prompts: textPrompts,
+            cfg_scale: cfgScale,
+            clip_guidance_preset: clipGuidancePreset,
+            sampler,
+            samples,
+            seed,
+            steps,
+            style_preset: stylePreset,
+        },
+        failedResponseHandler: failedStabilityCallResponseHandler,
+        successfulResponseHandler: createJsonResponseHandler(stabilityImageGenerationResponseSchema),
+        abortSignal,
+    });
+}
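A minimal end-to-end sketch of the model above, assuming generateImage and StabilityImageGenerationModel are re-exported from the package root (as the index files in the listing suggest) and following the { image } result shape shown in the JSDoc example; the output file name is illustrative:

import fs from "node:fs";
import { generateImage, StabilityImageGenerationModel } from "modelfusion";

// The API key is read from STABILITY_API_KEY when it is not passed explicitly (see the apiKey getter above).
const model = new StabilityImageGenerationModel({
  model: "stable-diffusion-512-v2-1",
  height: 512,
  width: 512,
});

// withSettings returns a copy of the model with the additional settings merged in.
const quickModel = model.withSettings({ model: "stable-diffusion-512-v2-1", steps: 10 });

const { image } = await generateImage(quickModel, [
  { text: "the wicked witch of the west" },
  { text: "style of early 19th century painting", weight: 0.5 },
]);

// extractBase64Image returns the first artifact as base64, so the result can be written out directly.
fs.writeFileSync("./example-image.png", Buffer.from(image, "base64"));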
@@ -0,0 +1,20 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.StabilityError = void 0;
+var StabilityError_js_1 = require("./StabilityError.cjs");
+Object.defineProperty(exports, "StabilityError", { enumerable: true, get: function () { return StabilityError_js_1.StabilityError; } });
+__exportStar(require("./StabilityImageGenerationModel.cjs"), exports);
package/package.json
ADDED
@@ -0,0 +1,87 @@
+{
+  "name": "modelfusion",
+  "description": "Build AI applications, chatbots, and agents with JavaScript and TypeScript.",
+  "version": "0.0.44",
+  "author": "Lars Grammel",
+  "license": "MIT",
+  "keywords": [
+    "llm",
+    "embedding",
+    "openai",
+    "cohere",
+    "huggingface",
+    "stability-ai",
+    "gpt-3",
+    "gpt-4",
+    "whisper",
+    "chatbot",
+    "ai"
+  ],
+  "homepage": "https://ai-utils.dev/",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/lgrammel/ai-utils.js"
+  },
+  "bugs": {
+    "url": "https://github.com/lgrammel/ai-utils.js/issues"
+  },
+  "type": "module",
+  "sideEffects": false,
+  "private": false,
+  "engines": {
+    "node": ">=18"
+  },
+  "files": [
+    "./*"
+  ],
+  "exports": {
+    ".": {
+      "types": "./index.d.ts",
+      "import": "./index.js",
+      "require": "./index.cjs"
+    }
+  },
+  "scripts": {
+    "setup": "husky install",
+    "lint": "eslint --ext .ts src",
+    "clean": "rimraf build dist",
+    "build": "npm run build:esm && npm run build:cjs",
+    "build:esm": "tsc --outDir dist/",
+    "build:cjs": "tsc --outDir build/cjs/ -p tsconfig.cjs.json && node bin/prepare-cjs.js",
+    "dist:copy-files": "copyfiles package.json README.md LICENSE dist",
+    "dist": "npm run clean && npm run lint && npm run build && npm run dist:copy-files"
+  },
+  "dependencies": {
+    "eventsource-parser": "1.0.0",
+    "js-tiktoken": "1.0.7",
+    "nanoid": "3.3.6",
+    "secure-json-parse": "2.7.0"
+  },
+  "devDependencies": {
+    "@pinecone-database/pinecone": "^0.1.6",
+    "@tsconfig/recommended": "1.0.2",
+    "@types/node": "18.11.9",
+    "@typescript-eslint/eslint-plugin": "^6.1.0",
+    "@typescript-eslint/parser": "^6.1.0",
+    "copyfiles": "2.4.1",
+    "eslint": "^8.45.0",
+    "eslint-config-prettier": "8.9.0",
+    "husky": "^8.0.3",
+    "lint-staged": "13.2.3",
+    "prettier": "3.0.0",
+    "rimraf": "5.0.1",
+    "typescript": "5.1.6",
+    "zod": "3.21.4",
+    "zod-to-json-schema": "3.21.4"
+  },
+  "peerDependencies": {
+    "@pinecone-database/pinecone": "*",
+    "zod": "^3",
+    "zod-to-json-schema": "^3"
+  },
+  "peerDependenciesMeta": {
+    "@pinecone-database/pinecone": {
+      "optional": true
+    }
+  }
+}
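The "exports" map above wires a single entry point for both module systems. A consumption sketch (the imported name is illustrative; anything exported from the package root resolves the same way):

// ESM (Node >= 18, "type": "module"): resolves to ./index.js via the "import" condition.
import { generateText } from "modelfusion";

// CommonJS: resolves to ./index.cjs via the "require" condition.
// const { generateText } = require("modelfusion");

// TypeScript picks up ./index.d.ts through the "types" condition in both cases.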
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1,56 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ChatToLlama2PromptMapping = exports.InstructionToLlama2PromptMapping = void 0;
+const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
+// see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
+const BEGIN_SEGMENT = "<s>";
+const END_SEGMENT = "</s>\n";
+const BEGIN_INSTRUCTION = "[INST]";
+const END_INSTRUCTION = "[/INST]\n";
+const BEGIN_SYSTEM = "<<SYS>>\n";
+const END_SYSTEM = "\n<</SYS>>\n\n";
+/**
+ * Maps an instruction prompt to the Llama2 prompt format.
+ *
+ * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
+ */
+const InstructionToLlama2PromptMapping = () => ({
+    stopTokens: [END_SEGMENT],
+    map: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
+        ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
+        : ""} ${instruction.instruction} ${END_INSTRUCTION}\n`,
+});
+exports.InstructionToLlama2PromptMapping = InstructionToLlama2PromptMapping;
+const ChatToLlama2PromptMapping = () => ({
+    map: (chatPrompt) => {
+        (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
+        let text = "";
+        for (let i = 0; i < chatPrompt.length; i++) {
+            const message = chatPrompt[i];
+            // system message:
+            if (i === 0 &&
+                "system" in message &&
+                typeof message.system === "string") {
+                // Separate section for system message to simplify implementation
+                // (this is slightly different from the original instructions):
+                text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${message.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`;
+                continue;
+            }
+            // user message
+            if ("user" in message) {
+                text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${message.user}${END_INSTRUCTION}`;
+                continue;
+            }
+            // ai message:
+            if ("ai" in message) {
+                text += `${message.ai}${END_SEGMENT}`;
+                continue;
+            }
+            // unsupported message:
+            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+        }
+        return text;
+    },
+    stopTokens: [END_SEGMENT],
+});
+exports.ChatToLlama2PromptMapping = ChatToLlama2PromptMapping;
@@ -0,0 +1,10 @@
+import { PromptMapping } from "./PromptMapping.js";
+import { InstructionPrompt } from "./InstructionPrompt.js";
+import { ChatPrompt } from "./chat/ChatPrompt.js";
+/**
+ * Maps an instruction prompt to the Llama2 prompt format.
+ *
+ * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
+ */
+export declare const InstructionToLlama2PromptMapping: () => PromptMapping<InstructionPrompt, string>;
+export declare const ChatToLlama2PromptMapping: () => PromptMapping<ChatPrompt, string>;
@@ -0,0 +1,51 @@
+import { validateChatPrompt } from "./chat/validateChatPrompt.js";
+// see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
+const BEGIN_SEGMENT = "<s>";
+const END_SEGMENT = "</s>\n";
+const BEGIN_INSTRUCTION = "[INST]";
+const END_INSTRUCTION = "[/INST]\n";
+const BEGIN_SYSTEM = "<<SYS>>\n";
+const END_SYSTEM = "\n<</SYS>>\n\n";
+/**
+ * Maps an instruction prompt to the Llama2 prompt format.
+ *
+ * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
+ */
+export const InstructionToLlama2PromptMapping = () => ({
+    stopTokens: [END_SEGMENT],
+    map: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
+        ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
+        : ""} ${instruction.instruction} ${END_INSTRUCTION}\n`,
+});
+export const ChatToLlama2PromptMapping = () => ({
+    map: (chatPrompt) => {
+        validateChatPrompt(chatPrompt);
+        let text = "";
+        for (let i = 0; i < chatPrompt.length; i++) {
+            const message = chatPrompt[i];
+            // system message:
+            if (i === 0 &&
+                "system" in message &&
+                typeof message.system === "string") {
+                // Separate section for system message to simplify implementation
+                // (this is slightly different from the original instructions):
+                text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${message.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`;
+                continue;
+            }
+            // user message
+            if ("user" in message) {
+                text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${message.user}${END_INSTRUCTION}`;
+                continue;
+            }
+            // ai message:
+            if ("ai" in message) {
+                text += `${message.ai}${END_SEGMENT}`;
+                continue;
+            }
+            // unsupported message:
+            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+        }
+        return text;
+    },
+    stopTokens: [END_SEGMENT],
+});
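To make the Llama 2 format concrete, here is a sketch of what the instruction mapping above produces, assuming the mapping is re-exported from the package root; the resulting string follows the BEGIN_/END_ constants defined in the file:

import { InstructionToLlama2PromptMapping } from "modelfusion";

const prompt = InstructionToLlama2PromptMapping().map({
  system: "You are a helpful assistant.",
  instruction: "Say hello.",
});

// prompt is:
// "<s>[INST] <<SYS>>\nYou are a helpful assistant.\n<</SYS>>\n\n Say hello. [/INST]\n\n"
// and stopTokens is ["</s>\n"], so generation stops at the end-of-segment marker.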
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ChatToOpenAIChatPromptMapping = exports.InstructionToOpenAIChatPromptMapping = void 0;
+const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
+const InstructionToOpenAIChatPromptMapping = () => ({
+    map: (instruction) => {
+        const messages = [];
+        if (instruction.system != null) {
+            messages.push({
+                role: "system",
+                content: instruction.system,
+            });
+        }
+        messages.push({
+            role: "user",
+            content: instruction.instruction,
+        });
+        return messages;
+    },
+    stopTokens: [],
+});
+exports.InstructionToOpenAIChatPromptMapping = InstructionToOpenAIChatPromptMapping;
+const ChatToOpenAIChatPromptMapping = () => ({
+    map: (chatPrompt) => {
+        (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
+        const messages = [];
+        for (let i = 0; i < chatPrompt.length; i++) {
+            const message = chatPrompt[i];
+            // system message:
+            if (i === 0 &&
+                "system" in message &&
+                typeof message.system === "string") {
+                messages.push({
+                    role: "system",
+                    content: message.system,
+                });
+                continue;
+            }
+            // user message
+            if ("user" in message) {
+                messages.push({
+                    role: "user",
+                    content: message.user,
+                });
+                continue;
+            }
+            // ai message:
+            if ("ai" in message) {
+                messages.push({
+                    role: "assistant",
+                    content: message.ai,
+                });
+                continue;
+            }
+            // unsupported message:
+            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+        }
+        return messages;
+    },
+    stopTokens: [],
+});
+exports.ChatToOpenAIChatPromptMapping = ChatToOpenAIChatPromptMapping;
@@ -0,0 +1,6 @@
+import { OpenAIChatMessage } from "../model-provider/openai/chat/OpenAIChatMessage.js";
+import { ChatPrompt } from "./chat/ChatPrompt.js";
+import { InstructionPrompt } from "./InstructionPrompt.js";
+import { PromptMapping } from "./PromptMapping.js";
+export declare const InstructionToOpenAIChatPromptMapping: () => PromptMapping<InstructionPrompt, Array<OpenAIChatMessage>>;
+export declare const ChatToOpenAIChatPromptMapping: () => PromptMapping<ChatPrompt, Array<OpenAIChatMessage>>;
@@ -0,0 +1,57 @@
+import { validateChatPrompt } from "./chat/validateChatPrompt.js";
+export const InstructionToOpenAIChatPromptMapping = () => ({
+    map: (instruction) => {
+        const messages = [];
+        if (instruction.system != null) {
+            messages.push({
+                role: "system",
+                content: instruction.system,
+            });
+        }
+        messages.push({
+            role: "user",
+            content: instruction.instruction,
+        });
+        return messages;
+    },
+    stopTokens: [],
+});
+export const ChatToOpenAIChatPromptMapping = () => ({
+    map: (chatPrompt) => {
+        validateChatPrompt(chatPrompt);
+        const messages = [];
+        for (let i = 0; i < chatPrompt.length; i++) {
+            const message = chatPrompt[i];
+            // system message:
+            if (i === 0 &&
+                "system" in message &&
+                typeof message.system === "string") {
+                messages.push({
+                    role: "system",
+                    content: message.system,
+                });
+                continue;
+            }
+            // user message
+            if ("user" in message) {
+                messages.push({
+                    role: "user",
+                    content: message.user,
+                });
+                continue;
+            }
+            // ai message:
+            if ("ai" in message) {
+                messages.push({
+                    role: "assistant",
+                    content: message.ai,
+                });
+                continue;
+            }
+            // unsupported message:
+            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+        }
+        return messages;
+    },
+    stopTokens: [],
+});
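The chat mapping above turns the { system } / { user } / { ai } message shapes into the role-based messages the OpenAI chat API expects. A sketch of the result, assuming the mapping is re-exported from the package root:

import { ChatToOpenAIChatPromptMapping } from "modelfusion";

const messages = ChatToOpenAIChatPromptMapping().map([
  { system: "You are a friendly bot." },
  { user: "Hi!" },
  { ai: "Hello! How can I help you?" },
  { user: "Tell me a joke." },
]);

// messages is:
// [
//   { role: "system", content: "You are a friendly bot." },
//   { role: "user", content: "Hi!" },
//   { role: "assistant", content: "Hello! How can I help you?" },
//   { role: "user", content: "Tell me a joke." },
// ]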
@@ -0,0 +1 @@
+export {};