modelfusion 0.92.0 → 0.93.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -19
- package/core/api/ApiCallError.cjs +9 -1
- package/core/api/ApiCallError.d.ts +4 -1
- package/core/api/ApiCallError.js +9 -1
- package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts} +2 -2
- package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
- package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs} +11 -11
- package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +20 -0
- package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js} +9 -9
- package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -6
- package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +4 -4
- package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -6
- package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts} +1 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +4 -4
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +2 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +4 -4
- package/model-function/generate-structure/index.cjs +1 -1
- package/model-function/generate-structure/index.d.ts +1 -1
- package/model-function/generate-structure/index.js +1 -1
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +2 -2
- package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs} +21 -21
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +35 -0
- package/model-function/generate-text/{PromptFormatTextGenerationModel.js → PromptTemplateTextGenerationModel.js} +19 -19
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +38 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +16 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +34 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
- package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +11 -0
- package/model-function/generate-text/index.cjs +4 -4
- package/model-function/generate-text/index.d.ts +4 -4
- package/model-function/generate-text/index.js +4 -4
- package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.cjs → prompt-template/AlpacaPromptTemplate.cjs} +5 -2
- package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.d.ts → prompt-template/AlpacaPromptTemplate.d.ts} +5 -5
- package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.js → prompt-template/AlpacaPromptTemplate.js} +5 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.cjs +31 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.js +29 -0
- package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.cjs → prompt-template/ChatMLPromptTemplate.cjs} +5 -5
- package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.d.ts → prompt-template/ChatMLPromptTemplate.d.ts} +7 -7
- package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.js → prompt-template/ChatMLPromptTemplate.js} +5 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +49 -0
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +47 -0
- package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Content.js +1 -0
- package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.d.ts +7 -0
- package/model-function/generate-text/prompt-template/InstructionPrompt.js +1 -0
- package/model-function/generate-text/{prompt-format/Llama2PromptFormat.cjs → prompt-template/Llama2PromptTemplate.cjs} +8 -7
- package/model-function/generate-text/{prompt-format/Llama2PromptFormat.d.ts → prompt-template/Llama2PromptTemplate.d.ts} +7 -6
- package/model-function/generate-text/{prompt-format/Llama2PromptFormat.js → prompt-template/Llama2PromptTemplate.js} +8 -7
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +49 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.d.ts +1 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +47 -0
- package/model-function/generate-text/{prompt-format/TextPromptFormat.cjs → prompt-template/TextPromptTemplate.cjs} +3 -0
- package/model-function/generate-text/{prompt-format/TextPromptFormat.d.ts → prompt-template/TextPromptTemplate.d.ts} +4 -4
- package/model-function/generate-text/{prompt-format/TextPromptFormat.js → prompt-template/TextPromptTemplate.js} +3 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +49 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.d.ts +1 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +47 -0
- package/model-function/generate-text/{prompt-format/VicunaPromptFormat.d.ts → prompt-template/VicunaPromptTemplate.d.ts} +2 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +21 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.d.ts +1 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +19 -0
- package/model-function/generate-text/{prompt-format → prompt-template}/index.cjs +6 -6
- package/model-function/generate-text/prompt-template/index.d.ts +10 -0
- package/model-function/generate-text/prompt-template/index.js +10 -0
- package/model-function/index.cjs +2 -2
- package/model-function/index.d.ts +2 -2
- package/model-function/index.js +2 -2
- package/model-provider/anthropic/{AnthropicPromptFormat.cjs → AnthropicPromptTemplate.cjs} +15 -8
- package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +17 -0
- package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
- package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
- package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +1 -0
- package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
- package/model-provider/anthropic/index.cjs +2 -2
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
- package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
- package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
- package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +4 -4
- package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs} +1 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +11 -0
- package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js} +1 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +12 -12
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +7 -7
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +12 -12
- package/model-provider/llamacpp/index.cjs +2 -2
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/ollama/OllamaError.cjs +25 -24
- package/model-provider/ollama/OllamaError.d.ts +1 -11
- package/model-provider/ollama/OllamaError.js +24 -22
- package/model-provider/ollama/OllamaTextGenerationModel.cjs +47 -10
- package/model-provider/ollama/OllamaTextGenerationModel.d.ts +12 -8
- package/model-provider/ollama/OllamaTextGenerationModel.js +48 -11
- package/model-provider/ollama/OllamaTextGenerationModel.test.cjs +63 -0
- package/model-provider/ollama/OllamaTextGenerationModel.test.d.ts +1 -0
- package/model-provider/ollama/OllamaTextGenerationModel.test.js +61 -0
- package/model-provider/ollama/index.cjs +1 -3
- package/model-provider/ollama/index.d.ts +1 -1
- package/model-provider/ollama/index.js +0 -1
- package/model-provider/openai/OpenAICompletionModel.cjs +10 -10
- package/model-provider/openai/OpenAICompletionModel.d.ts +7 -7
- package/model-provider/openai/OpenAICompletionModel.js +10 -10
- package/model-provider/openai/OpenAIError.cjs +13 -29
- package/model-provider/openai/OpenAIError.d.ts +2 -11
- package/model-provider/openai/OpenAIError.js +11 -26
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +4 -4
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
- package/model-provider/openai/OpenAIImageGenerationModel.js +4 -4
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +16 -16
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +14 -14
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +16 -16
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +1 -1
- package/model-provider/openai/chat/OpenAIChatModel.cjs +15 -15
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
- package/model-provider/openai/chat/OpenAIChatModel.js +15 -15
- package/model-provider/openai/chat/{OpenAIChatPromptFormat.cjs → OpenAIChatPromptTemplate.cjs} +1 -1
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +20 -0
- package/model-provider/openai/chat/{OpenAIChatPromptFormat.js → OpenAIChatPromptTemplate.js} +1 -1
- package/model-provider/openai/index.cjs +2 -4
- package/model-provider/openai/index.d.ts +2 -2
- package/model-provider/openai/index.js +1 -2
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +14 -14
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +11 -11
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +14 -14
- package/model-provider/stability/StabilityImageGenerationModel.cjs +5 -5
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.js +5 -5
- package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +2 -2
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +5 -8
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +5 -8
- package/package.json +5 -4
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +2 -2
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +6 -6
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +4 -4
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +6 -6
- package/util/AsyncQueue.test.cjs +20 -21
- package/util/AsyncQueue.test.js +9 -10
- package/util/isDeepEqualData.test.cjs +14 -15
- package/util/isDeepEqualData.test.js +14 -15
- package/util/runSafe.test.cjs +12 -13
- package/util/runSafe.test.js +6 -7
- package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +0 -20
- package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +0 -35
- package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +0 -38
- package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +0 -16
- package/model-function/generate-text/PromptFormatTextStreamingModel.js +0 -34
- package/model-function/generate-text/TextGenerationPromptFormat.d.ts +0 -11
- package/model-function/generate-text/prompt-format/index.d.ts +0 -10
- package/model-function/generate-text/prompt-format/index.js +0 -10
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +0 -17
- package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +0 -11
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +0 -20
- /package/model-function/{PromptFormat.cjs → PromptTemplate.cjs} +0 -0
- /package/model-function/{PromptFormat.js → PromptTemplate.js} +0 -0
- /package/model-function/generate-structure/{StructureFromTextPromptFormat.cjs → StructureFromTextPromptTemplate.cjs} +0 -0
- /package/model-function/generate-structure/{StructureFromTextPromptFormat.js → StructureFromTextPromptTemplate.js} +0 -0
- /package/model-function/generate-text/{TextGenerationPromptFormat.cjs → TextGenerationPromptTemplate.cjs} +0 -0
- /package/model-function/generate-text/{TextGenerationPromptFormat.js → TextGenerationPromptTemplate.js} +0 -0
- /package/model-function/generate-text/{prompt-format/Content.js → prompt-template/AlpacaPromptTemplate.test.d.ts} +0 -0
- /package/model-function/generate-text/{prompt-format/InstructionPrompt.js → prompt-template/ChatMLPromptTemplate.test.d.ts} +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.js +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/Content.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/Content.d.ts +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.d.ts +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.js +0 -0
- /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.cjs → prompt-template/VicunaPromptTemplate.cjs} +0 -0
- /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.js → prompt-template/VicunaPromptTemplate.js} +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.d.ts +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.js +0 -0
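Taken together, the file list above is dominated by one sweeping rename: every "prompt format" concept becomes a "prompt template" (the prompt-format/ directory, the PromptFormat* wrapper models, and the provider-specific format modules), the dedicated OllamaError class is folded into ApiCallError, and the prompt templates gain test files. From the hunks below, the renamed template type appears to keep two members, format and stopSequences; a reconstructed sketch of its shape (the body of TextGenerationPromptTemplate.d.ts itself is not shown in this diff, so this is inferred, not authoritative):

// Reconstructed from the hunks below: withPromptTemplate() spreads
// promptTemplate.stopSequences into the model settings, and withTextPrompt()
// passes an object whose format() maps the input prompt to the model prompt.
interface TextGenerationPromptTemplate<INPUT_PROMPT, MODEL_PROMPT> {
  format(prompt: INPUT_PROMPT): MODEL_PROMPT;
  stopSequences: string[];
}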
package/model-provider/cohere/CohereTextGenerationModel.js
CHANGED
@@ -3,8 +3,8 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
-import { chat, instruction, } from "../../model-function/generate-text/prompt-format/TextPromptFormat.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
+import { chat, instruction, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { parseJsonStream } from "../../util/streaming/parseJsonStream.js";
@@ -131,26 +131,26 @@ export class CohereTextGenerationModel extends AbstractModel {
         return fullDelta.delta;
     }
     /**
-     * Returns this model with an instruction prompt
+     * Returns this model with an instruction prompt template.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat(instruction());
+        return this.withPromptTemplate(instruction());
     }
     /**
-     * Returns this model with a chat prompt
+     * Returns this model with a chat prompt template.
      */
     withChatPrompt(options) {
-        return this.withPromptFormat(chat(options));
+        return this.withPromptTemplate(chat(options));
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
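The Cohere model's convenience methods now delegate to withPromptTemplate, as the hunk above shows. A usage sketch; the cohere.TextGenerator facade call and the positional streamText(model, prompt) signature are assumed from the modelfusion docs of this era rather than taken from this diff:

import { cohere, streamText } from "modelfusion";

// withInstructionPrompt() is sugar for withPromptTemplate(instruction()):
const model = cohere
  .TextGenerator({ model: "command" }) // assumed facade call
  .withInstructionPrompt();

const textStream = await streamText(model, {
  instruction: "Write a haiku about renaming things.",
});
for await (const textPart of textStream) {
  process.stdout.write(textPart);
}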
package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs
CHANGED
@@ -5,7 +5,7 @@ const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatTextGenerationModel_js_1 = require("../../model-function/generate-text/PromptFormatTextGenerationModel.cjs");
+const PromptTemplateTextGenerationModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextGenerationModel.cjs");
 const HuggingFaceApiConfiguration_js_1 = require("./HuggingFaceApiConfiguration.cjs");
 const HuggingFaceError_js_1 = require("./HuggingFaceError.cjs");
 /**
@@ -95,10 +95,10 @@ class HuggingFaceTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             text: response[0].generated_text,
         };
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextGenerationModel_js_1.PromptTemplateTextGenerationModel({
             model: this,
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts
CHANGED
@@ -2,9 +2,9 @@ import { z } from "zod";
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextGenerationModel } from "../../model-function/generate-text/PromptFormatTextGenerationModel.js";
+import { PromptTemplateTextGenerationModel } from "../../model-function/generate-text/PromptTemplateTextGenerationModel.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
-import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 export interface HuggingFaceTextGenerationModelSettings extends TextGenerationModelSettings {
     api?: ApiConfiguration;
     model: string;
@@ -53,7 +53,7 @@ export declare class HuggingFaceTextGenerationModel extends AbstractModel<HuggingFaceTextGenerationModelSettings>
         }[];
         text: string;
     }>;
-    withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
+    withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
     withSettings(additionalSettings: Partial<HuggingFaceTextGenerationModelSettings>): this;
 }
 declare const huggingFaceTextGenerationResponseSchema: z.ZodArray<z.ZodObject<{
package/model-provider/huggingface/HuggingFaceTextGenerationModel.js
CHANGED
@@ -2,7 +2,7 @@ import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextGenerationModel } from "../../model-function/generate-text/PromptFormatTextGenerationModel.js";
+import { PromptTemplateTextGenerationModel } from "../../model-function/generate-text/PromptTemplateTextGenerationModel.js";
 import { HuggingFaceApiConfiguration } from "./HuggingFaceApiConfiguration.js";
 import { failedHuggingFaceCallResponseHandler } from "./HuggingFaceError.js";
 /**
@@ -92,10 +92,10 @@ export class HuggingFaceTextGenerationModel extends AbstractModel {
             text: response[0].generated_text,
         };
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextGenerationModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextGenerationModel({
             model: this,
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs}
RENAMED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat = exports.instruction = void 0;
-const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-format/ChatPrompt.cjs");
+const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-template/ChatPrompt.cjs");
 // default Vicuna 1 system message
 const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
     "The assistant gives helpful, detailed, and polite answers to the user's questions.";
package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+import { MultiModalChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+import { MultiModalInstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+import { LlamaCppTextGenerationPrompt } from "./LlamaCppTextGenerationModel.js";
+/**
+ * BakLLaVA 1 uses a Vicuna 1 prompt. This mapping combines it with the LlamaCpp prompt structure.
+ *
+ * @see https://github.com/SkunkworksAI/BakLLaVA
+ */
+export declare function instruction(): TextGenerationPromptTemplate<MultiModalInstructionPrompt, LlamaCppTextGenerationPrompt>;
+export declare function chat(): TextGenerationPromptTemplate<MultiModalChatPrompt, LlamaCppTextGenerationPrompt>;
package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js}
RENAMED
@@ -1,4 +1,4 @@
-import { validateChatPrompt, } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
+import { validateChatPrompt, } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
 // default Vicuna 1 system message
 const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
     "The assistant gives helpful, detailed, and polite answers to the user's questions.";
package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs
CHANGED
@@ -6,7 +6,7 @@ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
 const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
 const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
@@ -101,7 +101,7 @@ class LlamaCppTextGenerationModel extends AbstractModel_js_1.AbstractModel {
         });
     }
     withTextPrompt() {
-        return this.withPromptFormat({
+        return this.withPromptTemplate({
             format(prompt) {
                 return { text: prompt };
             },
@@ -109,31 +109,31 @@ class LlamaCppTextGenerationModel extends AbstractModel_js_1.AbstractModel {
         });
     }
     /**
-     * Maps the prompt for a text version of the Llama.cpp prompt
+     * Maps the prompt for a text version of the Llama.cpp prompt template (without image support).
      */
-    withTextPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withTextPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withTextPrompt().withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     /**
-     * Maps the prompt for the full Llama.cpp prompt
+     * Maps the prompt for the full Llama.cpp prompt template (incl. image support).
      */
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts
CHANGED
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { Delta } from "../../model-function/Delta.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
-import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
 export interface LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE extends number | undefined> extends TextGenerationModelSettings {
     api?: ApiConfiguration;
@@ -110,15 +110,15 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends number | undefined>
         };
     }>;
     doStreamText(prompt: LlamaCppTextGenerationPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
-    withTextPrompt(): PromptFormatTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+    withTextPrompt(): PromptTemplateTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
     /**
-     * Maps the prompt for a text version of the Llama.cpp prompt
+     * Maps the prompt for a text version of the Llama.cpp prompt template (without image support).
      */
-    withTextPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, PromptFormatTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>>;
+    withTextPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, PromptTemplateTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>>;
     /**
-     * Maps the prompt for the full Llama.cpp prompt
+     * Maps the prompt for the full Llama.cpp prompt template (incl. image support).
      */
-    withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, LlamaCppTextGenerationPrompt>): PromptFormatTextStreamingModel<INPUT_PROMPT, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+    withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, LlamaCppTextGenerationPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
     withSettings(additionalSettings: Partial<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
 }
 declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{
package/model-provider/llamacpp/LlamaCppTextGenerationModel.js
CHANGED
@@ -3,7 +3,7 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { parseJSON } from "../../core/schema/parseJSON.js";
 import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
@@ -98,7 +98,7 @@ export class LlamaCppTextGenerationModel extends AbstractModel {
         });
     }
     withTextPrompt() {
-        return this.withPromptFormat({
+        return this.withPromptTemplate({
             format(prompt) {
                 return { text: prompt };
             },
@@ -106,31 +106,31 @@ export class LlamaCppTextGenerationModel extends AbstractModel {
         });
     }
     /**
-     * Maps the prompt for a text version of the Llama.cpp prompt
+     * Maps the prompt for a text version of the Llama.cpp prompt template (without image support).
      */
-    withTextPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withTextPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
             model: this.withTextPrompt().withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     /**
-     * Maps the prompt for the full Llama.cpp prompt
+     * Maps the prompt for the full Llama.cpp prompt template (incl. image support).
      */
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
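Because withTextPromptTemplate accepts any object with the format/stopSequences members shown above, a custom template can be passed inline without importing one of the shipped templates. A minimal sketch, assuming the llamacpp.TextGenerator facade call (not part of this diff):

import { llamacpp } from "modelfusion";

// Hypothetical instruction-style template: format() maps the user prompt to
// the llama.cpp text prompt, and stopSequences are merged into the model
// settings by withTextPromptTemplate(), exactly as the hunks above show.
const instructionTemplate = {
  format: (instruction: string) =>
    `### Instruction:\n${instruction}\n\n### Response:\n`,
  stopSequences: ["\n### Instruction:"],
};

const model = llamacpp
  .TextGenerator({ temperature: 0.7 }) // assumed facade call and setting
  .withTextPromptTemplate(instructionTemplate);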
package/model-provider/llamacpp/index.cjs
CHANGED
@@ -26,9 +26,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.llamacpp = exports.LlamaCppError = exports.LlamaCppBakLLaVA1Format = void 0;
+exports.llamacpp = exports.LlamaCppError = exports.LlamaCppBakLLaVA1Prompt = void 0;
 __exportStar(require("./LlamaCppApiConfiguration.cjs"), exports);
-exports.LlamaCppBakLLaVA1Format = __importStar(require("./LlamaCppBakLLaVA1Format.cjs"));
+exports.LlamaCppBakLLaVA1Prompt = __importStar(require("./LlamaCppBakLLaVA1PromptTemplate.cjs"));
 var LlamaCppError_js_1 = require("./LlamaCppError.cjs");
 Object.defineProperty(exports, "LlamaCppError", { enumerable: true, get: function () { return LlamaCppError_js_1.LlamaCppError; } });
 exports.llamacpp = __importStar(require("./LlamaCppFacade.cjs"));
package/model-provider/llamacpp/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
 export * from "./LlamaCppApiConfiguration.js";
-export * as LlamaCppBakLLaVA1Format from "./LlamaCppBakLLaVA1Format.js";
+export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError, LlamaCppErrorData } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
package/model-provider/llamacpp/index.js
CHANGED
@@ -1,5 +1,5 @@
 export * from "./LlamaCppApiConfiguration.js";
-export * as LlamaCppBakLLaVA1Format from "./LlamaCppBakLLaVA1Format.js";
+export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
package/model-provider/ollama/OllamaError.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.failedOllamaCallResponseHandler = exports.OllamaError = void 0;
+exports.failedOllamaCallResponseHandler = void 0;
 const zod_1 = require("zod");
 const ApiCallError_js_1 = require("../../core/api/ApiCallError.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
@@ -8,30 +8,31 @@ const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
 const ollamaErrorDataSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
     error: zod_1.z.string(),
 }));
-class OllamaError extends ApiCallError_js_1.ApiCallError {
-    constructor({ statusCode, url, requestBodyValues, data, message = data.error, }) {
-        super({ message, statusCode, requestBodyValues, url });
-        Object.defineProperty(this, "data", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.data = data;
-    }
-}
-exports.OllamaError = OllamaError;
 const failedOllamaCallResponseHandler = async ({ response, url, requestBodyValues }) => {
     const responseBody = await response.text();
-    … (old handler body not shown in the diff view)
+    // resilient parsing in case the response is not JSON or does not match the schema:
+    try {
+        const parsedError = (0, parseJSON_js_1.parseJSON)({
+            text: responseBody,
+            schema: ollamaErrorDataSchema,
+        });
+        return new ApiCallError_js_1.ApiCallError({
+            message: parsedError.error,
+            url,
+            requestBodyValues,
+            statusCode: response.status,
+            responseBody,
+            data: parsedError,
+        });
+    }
+    catch (parseError) {
+        return new ApiCallError_js_1.ApiCallError({
+            message: responseBody.trim() !== "" ? responseBody : response.statusText,
+            url,
+            requestBodyValues,
+            statusCode: response.status,
+            responseBody,
+        });
+    }
 };
 exports.failedOllamaCallResponseHandler = failedOllamaCallResponseHandler;
package/model-provider/ollama/OllamaError.d.ts
CHANGED
@@ -4,16 +4,6 @@ import { ZodSchema } from "../../core/schema/ZodSchema.js";
 declare const ollamaErrorDataSchema: ZodSchema<{
     error: string;
 }>;
-type OllamaErrorData = (typeof ollamaErrorDataSchema)["_type"];
-export declare class OllamaError extends ApiCallError {
-    readonly data: OllamaErrorData;
-    constructor({ statusCode, url, requestBodyValues, data, message, }: {
-        message?: string;
-        statusCode: number;
-        url: string;
-        requestBodyValues: unknown;
-        data: OllamaErrorData;
-    });
-}
+export type OllamaErrorData = (typeof ollamaErrorDataSchema)["_type"];
 export declare const failedOllamaCallResponseHandler: ResponseHandler<ApiCallError>;
 export {};
package/model-provider/ollama/OllamaError.js
CHANGED
@@ -5,28 +5,30 @@ import { parseJSON } from "../../core/schema/parseJSON.js";
 const ollamaErrorDataSchema = new ZodSchema(z.object({
     error: z.string(),
 }));
-export class OllamaError extends ApiCallError {
-    constructor({ statusCode, url, requestBodyValues, data, message = data.error, }) {
-        super({ message, statusCode, requestBodyValues, url });
-        Object.defineProperty(this, "data", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.data = data;
-    }
-}
 export const failedOllamaCallResponseHandler = async ({ response, url, requestBodyValues }) => {
     const responseBody = await response.text();
-    … (old handler body not shown in the diff view)
+    // resilient parsing in case the response is not JSON or does not match the schema:
+    try {
+        const parsedError = parseJSON({
+            text: responseBody,
+            schema: ollamaErrorDataSchema,
+        });
+        return new ApiCallError({
+            message: parsedError.error,
+            url,
+            requestBodyValues,
+            statusCode: response.status,
+            responseBody,
+            data: parsedError,
+        });
+    }
+    catch (parseError) {
+        return new ApiCallError({
+            message: responseBody.trim() !== "" ? responseBody : response.statusText,
+            url,
+            requestBodyValues,
+            statusCode: response.status,
+            responseBody,
+        });
+    }
 };
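With the OllamaError class deleted above, Ollama API failures now surface as plain ApiCallError instances: the parsed body lands on data when it matches the schema, and the raw body is available on responseBody (a field ApiCallError gains in this release, per the ApiCallError.d.ts entry in the file list). A hypothetical catch-site migration, assuming the ollama.TextGenerator facade and the positional generateText(model, prompt) call of this era:

import { ApiCallError, generateText, ollama } from "modelfusion";

try {
  const text = await generateText(
    ollama.TextGenerator({ model: "mistral" }), // assumed facade call
    "Why is the sky blue?"
  );
  console.log(text);
} catch (error) {
  // 0.92 (assumed): error instanceof OllamaError
  // 0.93: a plain ApiCallError carrying status code and raw response body
  if (error instanceof ApiCallError) {
    console.error(error.message, error.responseBody);
  }
}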
package/model-provider/ollama/OllamaTextGenerationModel.cjs
CHANGED
@@ -2,11 +2,13 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OllamaTextGenerationResponseFormat = exports.OllamaTextGenerationModel = void 0;
 const zod_1 = require("zod");
+const ApiCallError_js_1 = require("../../core/api/ApiCallError.cjs");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
+const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
 const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
 const TextGenerationToolCallsOrGenerateTextModel_js_1 = require("../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
@@ -96,27 +98,27 @@ class OllamaTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.OllamaTextGenerationResponseFormat.deltaIterable,
         });
     }
-    asToolCallGenerationModel(promptFormat) {
+    asToolCallGenerationModel(promptTemplate) {
         return new TextGenerationToolCallModel_js_1.TextGenerationToolCallModel({
             model: this,
-            format: promptFormat,
+            format: promptTemplate,
         });
     }
-    asToolCallsOrTextGenerationModel(promptFormat) {
+    asToolCallsOrTextGenerationModel(promptTemplate) {
         return new TextGenerationToolCallsOrGenerateTextModel_js_1.TextGenerationToolCallsOrGenerateTextModel({
             model: this,
-            template: promptFormat,
+            template: promptTemplate,
         });
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
@@ -238,7 +240,42 @@ exports.OllamaTextGenerationResponseFormat = {
      */
     json: {
         stream: false,
-        handler: (…),
+        handler: (async ({ response, url, requestBodyValues }) => {
+            const responseBody = await response.text();
+            const parsedResult = (0, parseJSON_js_1.safeParseJSON)({
+                text: responseBody,
+                schema: new ZodSchema_js_1.ZodSchema(zod_1.z.union([
+                    ollamaTextGenerationResponseSchema,
+                    zod_1.z.object({
+                        done: zod_1.z.literal(false),
+                        model: zod_1.z.string(),
+                        created_at: zod_1.z.string(),
+                        response: zod_1.z.string(),
+                    }),
+                ])),
+            });
+            if (!parsedResult.success) {
+                throw new ApiCallError_js_1.ApiCallError({
+                    message: "Invalid JSON response",
+                    cause: parsedResult.error,
+                    statusCode: response.status,
+                    responseBody,
+                    url,
+                    requestBodyValues,
+                });
+            }
+            if (parsedResult.data.done === false) {
+                throw new ApiCallError_js_1.ApiCallError({
+                    message: "Incomplete Ollama response received",
+                    statusCode: response.status,
+                    responseBody,
+                    url,
+                    requestBodyValues,
+                    isRetryable: true,
+                });
+            }
+            return parsedResult.data;
+        }),
     },
     /**
      * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
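The new json handler above does more than parse: a structurally valid response with done: false is rejected with an ApiCallError marked isRetryable: true, so truncated Ollama completions can be retried by whatever retry policy the API configuration carries. A hedged sketch; OllamaApiConfiguration is assumed by analogy with the HuggingFaceApiConfiguration and LlamaCppApiConfiguration classes visible in earlier hunks, and retryWithExponentialBackoff is assumed to be the core retry helper:

import {
  ollama,
  OllamaApiConfiguration, // assumed export, by analogy with other providers
  retryWithExponentialBackoff, // assumed core retry helper
} from "modelfusion";

// Incomplete (done: false) JSON responses now throw a retryable ApiCallError,
// so a policy like this one will re-issue the request:
const model = ollama.TextGenerator({
  api: new OllamaApiConfiguration({
    retry: retryWithExponentialBackoff({ maxTries: 5 }),
  }),
  model: "mistral",
});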
package/model-provider/ollama/OllamaTextGenerationModel.d.ts
CHANGED
@@ -4,11 +4,11 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { Delta } from "../../model-function/Delta.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
-import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
-import { TextGenerationToolCallModel, ToolCallPromptFormat } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptFormat } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 /**
  * @see https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion
  */
@@ -137,9 +137,9 @@ export declare class OllamaTextGenerationModel<CONTEXT_WINDOW_SIZE extends number | undefined>
         text: string;
     }>;
     doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
-    asToolCallGenerationModel<INPUT_PROMPT>(promptFormat: ToolCallPromptFormat<INPUT_PROMPT, string>): TextGenerationToolCallModel<INPUT_PROMPT, string, this>;
-    asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptFormat: ToolCallsOrGenerateTextPromptFormat<INPUT_PROMPT, string>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, string, this>;
-    withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, OllamaTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+    asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, string>): TextGenerationToolCallModel<INPUT_PROMPT, string, this>;
+    asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsOrGenerateTextPromptTemplate<INPUT_PROMPT, string>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, string, this>;
+    withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, OllamaTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
     withSettings(additionalSettings: Partial<OllamaTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
 }
 declare const ollamaTextGenerationResponseSchema: z.ZodObject<{
@@ -189,7 +189,11 @@ export declare const OllamaTextGenerationResponseFormat: {
      */
     json: {
         stream: false;
-        handler: ResponseHandler<{
+        handler: ({ response, url, requestBodyValues }: {
+            url: string;
+            requestBodyValues: unknown;
+            response: Response;
+        }) => Promise<{
             response: string;
             model: string;
             done: true;