modelfusion 0.102.0 → 0.104.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +27 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +1 -1
- package/model-function/generate-text/TextGenerationModel.cjs +7 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +3 -1
- package/model-function/generate-text/TextGenerationModel.js +6 -1
- package/model-function/generate-text/TextGenerationResult.cjs +2 -0
- package/model-function/generate-text/TextGenerationResult.d.ts +11 -0
- package/model-function/generate-text/TextGenerationResult.js +1 -0
- package/model-function/generate-text/generateText.cjs +14 -9
- package/model-function/generate-text/generateText.d.ts +3 -0
- package/model-function/generate-text/generateText.js +14 -9
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -1
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +2 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +5 -4
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +5 -4
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +0 -24
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +11 -34
- package/model-function/generate-text/prompt-template/ChatPrompt.js +1 -22
- package/model-function/generate-text/prompt-template/Content.cjs +9 -0
- package/model-function/generate-text/prompt-template/Content.d.ts +9 -4
- package/model-function/generate-text/prompt-template/Content.js +7 -1
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +6 -22
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +36 -5
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +16 -4
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +34 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +5 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +5 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +3 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +3 -4
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +3 -3
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +2 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +3 -3
- package/model-function/generate-text/prompt-template/trimChatPrompt.cjs +0 -2
- package/model-function/generate-text/prompt-template/trimChatPrompt.d.ts +4 -4
- package/model-function/generate-text/prompt-template/trimChatPrompt.js +0 -2
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +5 -4
- package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +4 -4
- package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -4
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +23 -8
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +8 -3
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +24 -9
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +22 -6
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +8 -3
- package/model-provider/cohere/CohereTextGenerationModel.js +22 -6
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +2 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +2 -2
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +9 -8
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +4 -5
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +9 -8
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +23 -16
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -4
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +23 -16
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +51 -51
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +14 -11
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +51 -51
- package/model-provider/mistral/MistralChatModel.cjs +19 -2
- package/model-provider/mistral/MistralChatModel.d.ts +8 -3
- package/model-provider/mistral/MistralChatModel.js +19 -2
- package/model-provider/mistral/MistralPromptTemplate.cjs +5 -4
- package/model-provider/mistral/MistralPromptTemplate.d.ts +4 -4
- package/model-provider/mistral/MistralPromptTemplate.js +5 -4
- package/model-provider/ollama/OllamaChatModel.cjs +8 -3
- package/model-provider/ollama/OllamaChatModel.d.ts +6 -3
- package/model-provider/ollama/OllamaChatModel.js +8 -3
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +9 -13
- package/model-provider/ollama/OllamaChatPromptTemplate.d.ts +4 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +9 -13
- package/model-provider/ollama/OllamaCompletionModel.cjs +8 -3
- package/model-provider/ollama/OllamaCompletionModel.d.ts +4 -1
- package/model-provider/ollama/OllamaCompletionModel.js +8 -3
- package/model-provider/openai/OpenAICompletionModel.cjs +20 -4
- package/model-provider/openai/OpenAICompletionModel.d.ts +8 -3
- package/model-provider/openai/OpenAICompletionModel.js +20 -4
- package/model-provider/openai/chat/AbstractOpenAIChatModel.cjs +19 -1
- package/model-provider/openai/chat/AbstractOpenAIChatModel.d.ts +6 -1
- package/model-provider/openai/chat/AbstractOpenAIChatModel.js +19 -1
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatModel.cjs +2 -3
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatModel.js +2 -3
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -2
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +4 -4
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -2
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +2 -3
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +2 -2
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +2 -3
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +6 -6
- package/package.json +2 -2
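
Most of the changes above thread a new TextGenerationResult type (with a finishReason field) through generateText and the individual provider models. Below is a minimal sketch of how that might be consumed; the fullResponse option and finishReason field follow the .d.ts changes listed here, while the model constructor and the concrete finish-reason value are assumptions for illustration, not taken from this diff.

```ts
import { generateText, openai } from "modelfusion";

async function main() {
  // Sketch only: with fullResponse enabled, generateText is assumed to also
  // expose the finish reason of the generation alongside the text.
  const { text, finishReason } = await generateText(
    openai.CompletionTextGenerator({ model: "gpt-3.5-turbo-instruct" }), // assumed model setup
    "Write a haiku about diffing package versions.",
    { fullResponse: true }
  );

  if (finishReason === "length") {
    // Assumed finish-reason value: the output hit the token limit.
    console.warn("Generation was truncated:", text);
  } else {
    console.log(text);
  }
}

main().catch(console.error);
```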
package/model-provider/stability/StabilityImageGenerationModel.d.ts
CHANGED
@@ -69,16 +69,16 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
     doGenerateImages(prompt: StabilityImageGenerationPrompt, options?: FunctionOptions): Promise<{
         response: {
             artifacts: {
+                finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
                 base64: string;
                 seed: number;
-                finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
             }[];
         };
         base64Images: string[];
     }>;
     withTextPrompt(): PromptTemplateImageGenerationModel<string, StabilityImageGenerationPrompt, StabilityImageGenerationSettings, this>;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, StabilityImageGenerationPrompt>): PromptTemplateImageGenerationModel<INPUT_PROMPT, StabilityImageGenerationPrompt, StabilityImageGenerationSettings, this>;
-    withSettings(additionalSettings: StabilityImageGenerationSettings): this;
+    withSettings(additionalSettings: Partial<StabilityImageGenerationSettings>): this;
 }
 declare const stabilityImageGenerationResponseSchema: z.ZodObject<{
     artifacts: z.ZodArray<z.ZodObject<{
@@ -86,25 +86,25 @@ declare const stabilityImageGenerationResponseSchema: z.ZodObject<{
         seed: z.ZodNumber;
         finishReason: z.ZodEnum<["SUCCESS", "ERROR", "CONTENT_FILTERED"]>;
     }, "strip", z.ZodTypeAny, {
+        finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
         base64: string;
         seed: number;
-        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
     }, {
+        finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
         base64: string;
         seed: number;
-        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
     }>, "many">;
 }, "strip", z.ZodTypeAny, {
     artifacts: {
+        finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
         base64: string;
         seed: number;
-        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
     }[];
 }, {
     artifacts: {
+        finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
         base64: string;
         seed: number;
-        finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
     }[];
 }>;
 export type StabilityImageGenerationResponse = z.infer<typeof stabilityImageGenerationResponseSchema>;
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "modelfusion",
   "description": "The TypeScript library for building multi-modal AI applications.",
-  "version": "0.102.0",
+  "version": "0.104.0",
   "author": "Lars Grammel",
   "license": "MIT",
   "keywords": [
@@ -89,7 +89,7 @@
     "@vitest/coverage-v8": "^1.1.0",
     "@vitest/ui": "1.1.0",
     "eslint": "^8.45.0",
-    "eslint-config-prettier": "9.
+    "eslint-config-prettier": "9.1.0",
     "fastify": "^4.0.0",
     "msw": "2.0.10"
   }