modelfusion 0.92.1 → 0.93.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -19
- package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts} +2 -2
- package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
- package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs} +11 -11
- package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +20 -0
- package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js} +9 -9
- package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -6
- package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +4 -4
- package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -6
- package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts} +1 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +4 -4
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +2 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +4 -4
- package/model-function/generate-structure/index.cjs +1 -1
- package/model-function/generate-structure/index.d.ts +1 -1
- package/model-function/generate-structure/index.js +1 -1
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +2 -2
- package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs} +21 -21
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +35 -0
- package/model-function/generate-text/{PromptFormatTextGenerationModel.js → PromptTemplateTextGenerationModel.js} +19 -19
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +38 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +16 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +34 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
- package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +11 -0
- package/model-function/generate-text/index.cjs +4 -4
- package/model-function/generate-text/index.d.ts +4 -4
- package/model-function/generate-text/index.js +4 -4
- package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.cjs → prompt-template/AlpacaPromptTemplate.cjs} +5 -2
- package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.d.ts → prompt-template/AlpacaPromptTemplate.d.ts} +5 -5
- package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.js → prompt-template/AlpacaPromptTemplate.js} +5 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.cjs +31 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.js +29 -0
- package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.cjs → prompt-template/ChatMLPromptTemplate.cjs} +5 -5
- package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.d.ts → prompt-template/ChatMLPromptTemplate.d.ts} +7 -7
- package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.js → prompt-template/ChatMLPromptTemplate.js} +5 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +49 -0
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +47 -0
- package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Content.js +1 -0
- package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.d.ts +7 -0
- package/model-function/generate-text/prompt-template/InstructionPrompt.js +1 -0
- package/model-function/generate-text/{prompt-format/Llama2PromptFormat.cjs → prompt-template/Llama2PromptTemplate.cjs} +8 -7
- package/model-function/generate-text/{prompt-format/Llama2PromptFormat.d.ts → prompt-template/Llama2PromptTemplate.d.ts} +7 -6
- package/model-function/generate-text/{prompt-format/Llama2PromptFormat.js → prompt-template/Llama2PromptTemplate.js} +8 -7
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +49 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.d.ts +1 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +47 -0
- package/model-function/generate-text/{prompt-format/TextPromptFormat.cjs → prompt-template/TextPromptTemplate.cjs} +3 -0
- package/model-function/generate-text/{prompt-format/TextPromptFormat.d.ts → prompt-template/TextPromptTemplate.d.ts} +4 -4
- package/model-function/generate-text/{prompt-format/TextPromptFormat.js → prompt-template/TextPromptTemplate.js} +3 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +49 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.d.ts +1 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +47 -0
- package/model-function/generate-text/{prompt-format/VicunaPromptFormat.d.ts → prompt-template/VicunaPromptTemplate.d.ts} +2 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +21 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.d.ts +1 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +19 -0
- package/model-function/generate-text/{prompt-format → prompt-template}/index.cjs +6 -6
- package/model-function/generate-text/prompt-template/index.d.ts +10 -0
- package/model-function/generate-text/prompt-template/index.js +10 -0
- package/model-function/index.cjs +2 -2
- package/model-function/index.d.ts +2 -2
- package/model-function/index.js +2 -2
- package/model-provider/anthropic/{AnthropicPromptFormat.cjs → AnthropicPromptTemplate.cjs} +15 -8
- package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +17 -0
- package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
- package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
- package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +1 -0
- package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
- package/model-provider/anthropic/index.cjs +2 -2
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
- package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
- package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
- package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +4 -4
- package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs} +1 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +11 -0
- package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js} +1 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +12 -12
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +7 -7
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +12 -12
- package/model-provider/llamacpp/index.cjs +2 -2
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/ollama/OllamaTextGenerationModel.cjs +9 -9
- package/model-provider/ollama/OllamaTextGenerationModel.d.ts +7 -7
- package/model-provider/ollama/OllamaTextGenerationModel.js +9 -9
- package/model-provider/openai/OpenAICompletionModel.cjs +10 -10
- package/model-provider/openai/OpenAICompletionModel.d.ts +7 -7
- package/model-provider/openai/OpenAICompletionModel.js +10 -10
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +4 -4
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
- package/model-provider/openai/OpenAIImageGenerationModel.js +4 -4
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +16 -16
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +14 -14
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +16 -16
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +1 -1
- package/model-provider/openai/chat/OpenAIChatModel.cjs +15 -15
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
- package/model-provider/openai/chat/OpenAIChatModel.js +15 -15
- package/model-provider/openai/chat/{OpenAIChatPromptFormat.cjs → OpenAIChatPromptTemplate.cjs} +1 -1
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +20 -0
- package/model-provider/openai/chat/{OpenAIChatPromptFormat.js → OpenAIChatPromptTemplate.js} +1 -1
- package/model-provider/openai/index.cjs +2 -2
- package/model-provider/openai/index.d.ts +1 -1
- package/model-provider/openai/index.js +1 -1
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +14 -14
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +11 -11
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +14 -14
- package/model-provider/stability/StabilityImageGenerationModel.cjs +5 -5
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.js +5 -5
- package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +2 -2
- package/package.json +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +2 -2
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +6 -6
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +4 -4
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +6 -6
- package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +0 -20
- package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +0 -35
- package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +0 -38
- package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +0 -16
- package/model-function/generate-text/PromptFormatTextStreamingModel.js +0 -34
- package/model-function/generate-text/TextGenerationPromptFormat.d.ts +0 -11
- package/model-function/generate-text/prompt-format/index.d.ts +0 -10
- package/model-function/generate-text/prompt-format/index.js +0 -10
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +0 -17
- package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +0 -11
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +0 -20
- /package/model-function/{PromptFormat.cjs → PromptTemplate.cjs} +0 -0
- /package/model-function/{PromptFormat.js → PromptTemplate.js} +0 -0
- /package/model-function/generate-structure/{StructureFromTextPromptFormat.cjs → StructureFromTextPromptTemplate.cjs} +0 -0
- /package/model-function/generate-structure/{StructureFromTextPromptFormat.js → StructureFromTextPromptTemplate.js} +0 -0
- /package/model-function/generate-text/{TextGenerationPromptFormat.cjs → TextGenerationPromptTemplate.cjs} +0 -0
- /package/model-function/generate-text/{TextGenerationPromptFormat.js → TextGenerationPromptTemplate.js} +0 -0
- /package/model-function/generate-text/{prompt-format/Content.js → prompt-template/AlpacaPromptTemplate.test.d.ts} +0 -0
- /package/model-function/generate-text/{prompt-format/InstructionPrompt.js → prompt-template/ChatMLPromptTemplate.test.d.ts} +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.js +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/Content.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/Content.d.ts +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.d.ts +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.js +0 -0
- /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.cjs → prompt-template/VicunaPromptTemplate.cjs} +0 -0
- /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.js → prompt-template/VicunaPromptTemplate.js} +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.cjs +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.d.ts +0 -0
- /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.js +0 -0
package/README.md
CHANGED
@@ -48,7 +48,7 @@ You can provide API keys for the different [integrations](https://modelfusion.de
 ### [Generate Text](https://modelfusion.dev/guide/function/generate-text)
 
 Generate text using a language model and a prompt. You can stream the text if it is supported by the model. You can use images for multi-modal prompting if the model supports it (e.g. with [llama.cpp](https://modelfusion.dev/guide/)).
-You can use [prompt
+You can use [prompt templates](https://modelfusion.dev/guide/function/generate-text#prompt-format) to change the prompt template of a model.
 
 #### generateText
 
@@ -509,9 +509,9 @@ const retrievedTexts = await retrieve(
 
 Available Vector Stores: [Memory](https://modelfusion.dev/integration/vector-index/memory), [SQLite VSS](https://modelfusion.dev/integration/vector-index/sqlite-vss), [Pinecone](https://modelfusion.dev/integration/vector-index/pinecone)
 
-### [Text Generation Prompt
+### [Text Generation Prompt Templates](https://modelfusion.dev/guide/function/generate-text#prompt-format)
 
-Prompt
+Prompt templates let you use higher level prompt structures (such as text, instruction or chat prompts) for different models.
 
 #### Text Prompt Example
 
@@ -536,7 +536,7 @@ const text = await generateText(
 contextWindowSize: 4096, // Llama 2 context window size
 maxCompletionTokens: 1000,
 })
-.
+.withTextPromptTemplate(Llama2Prompt.instruction()),
 {
 system: "You are a story writer.",
 instruction: "Write a short story about a robot learning to love.",
@@ -575,19 +575,19 @@ const textStream = await streamText(
 );
 ```
 
-| Prompt
-|
-| OpenAI Chat
-| Anthropic
-| Llama 2
-| ChatML
-| Alpaca
-| Vicuna
-| Generic Text
+| Prompt Template | Text Prompt | Instruction Prompt | Chat Prompt |
+| --------------- | ----------- | ------------------ | ----------- |
+| OpenAI Chat | ✅ | ✅ | ✅ |
+| Anthropic | ✅ | ✅ | ✅ |
+| Llama 2 | ✅ | ✅ | ✅ |
+| ChatML | ✅ | ✅ | ✅ |
+| Alpaca | ✅ | ✅ | ❌ |
+| Vicuna | ❌ | ❌ | ✅ |
+| Generic Text | ✅ | ✅ | ✅ |
 
-### [Image Generation Prompt
+### [Image Generation Prompt Templates](https://modelfusion.dev/guide/function/generate-image/prompt-format)
 
-You an use prompt
+You an use prompt templates with image models as well, e.g. to use a basic text prompt. It is available as a shorthand method:
 
 ```ts
 const image = await generateImage(
@@ -600,10 +600,10 @@ const image = await generateImage(
 );
 ```
 
-| Prompt
-|
-| Automatic1111
-| Stability
+| Prompt Template | Text Prompt |
+| --------------- | ----------- |
+| Automatic1111 | ✅ |
+| Stability | ✅ |
 
 ### Metadata and original responses
 
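The hunk above truncates the old call chain; the updated README example reads roughly as follows. A minimal sketch, assuming `LlamaCppTextGenerationModel` and `Llama2Prompt` are exported from the package root (the hunk only shows the option lines and the `.withTextPromptTemplate(Llama2Prompt.instruction())` call):

```ts
import {
  generateText,
  Llama2Prompt,
  LlamaCppTextGenerationModel,
} from "modelfusion";

// Sketch only: constructor options beyond those visible in the hunk are assumptions.
const text = await generateText(
  new LlamaCppTextGenerationModel({
    contextWindowSize: 4096, // Llama 2 context window size
    maxCompletionTokens: 1000,
  }).withTextPromptTemplate(Llama2Prompt.instruction()),
  {
    system: "You are a story writer.",
    instruction: "Write a short story about a robot learning to love.",
  }
);
```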
package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts}
@@ -1,7 +1,7 @@
 /**
-* Prompt
+* Prompt templates format a source prompt into the structure of a target prompt.
 */
-export interface
+export interface PromptTemplate<SOURCE_PROMPT, TARGET_PROMPT> {
 /**
 * Formats the source prompt into the structure of the target prompt.
 */
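The renamed `PromptTemplate` interface is the abstraction behind the `with...PromptTemplate` methods later in this diff: it maps a source prompt into the structure a model expects through a single `format` call (see the wrapper classes below). A minimal custom template, assuming the interface is re-exported from the package root:

```ts
import type { PromptTemplate } from "modelfusion";

// Hypothetical example: maps a { question } object onto the plain string prompt a model expects.
// Only the format() member shown above is relied on.
const questionTemplate: PromptTemplate<{ question: string }, string> = {
  format: ({ question }) => `Answer concisely: ${question}`,
};
```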
package/model-function/generate-image/ImageGenerationModel.d.ts
@@ -1,6 +1,6 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { Model, ModelSettings } from "../Model.js";
-import {
+import { PromptTemplate } from "../PromptTemplate.js";
 export interface ImageGenerationModelSettings extends ModelSettings {
 }
 export interface ImageGenerationModel<PROMPT, SETTINGS extends ImageGenerationModelSettings = ImageGenerationModelSettings> extends Model<SETTINGS> {
@@ -8,5 +8,5 @@ export interface ImageGenerationModel<PROMPT, SETTINGS extends ImageGenerationMo
 response: unknown;
 base64Image: string;
 }>;
-
+withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, PROMPT>): ImageGenerationModel<INPUT_PROMPT, SETTINGS>;
 }
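`ImageGenerationModel` now exposes `withPromptTemplate` with the signature above. A hedged sketch of how the generic can be used to let an image model accept a structured prompt instead of a raw string (the helper and prompt type are illustrative, not part of the package):

```ts
import type {
  ImageGenerationModel,
  ImageGenerationModelSettings,
  PromptTemplate,
} from "modelfusion";

type StyledPrompt = { subject: string; style: string };

// Wrap any string-prompted image model so callers pass { subject, style } instead.
// Relies only on the withPromptTemplate signature declared above.
function withStyledPrompt<SETTINGS extends ImageGenerationModelSettings>(
  model: ImageGenerationModel<string, SETTINGS>
): ImageGenerationModel<StyledPrompt, SETTINGS> {
  const template: PromptTemplate<StyledPrompt, string> = {
    format: ({ subject, style }) => `${subject}, ${style}`,
  };
  return model.withPromptTemplate(template);
}
```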
package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs}
@@ -1,22 +1,22 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-class
-constructor({ model,
+exports.PromptTemplateImageGenerationModel = void 0;
+class PromptTemplateImageGenerationModel {
+constructor({ model, promptTemplate, }) {
 Object.defineProperty(this, "model", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "promptTemplate", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
 this.model = model;
-this.
+this.promptTemplate = promptTemplate;
 }
 get modelInformation() {
 return this.model.modelInformation;
@@ -25,20 +25,20 @@ class PromptFormatImageGenerationModel {
 return this.model.settings;
 }
 doGenerateImage(prompt, options) {
-const mappedPrompt = this.
+const mappedPrompt = this.promptTemplate.format(prompt);
 return this.model.doGenerateImage(mappedPrompt, options);
 }
 get settingsForEvent() {
 return this.model.settingsForEvent;
 }
-
-return new
+withPromptTemplate(promptTemplate) {
+return new PromptTemplateImageGenerationModel({ model: this, promptTemplate: promptTemplate });
 }
 withSettings(additionalSettings) {
-return new
+return new PromptTemplateImageGenerationModel({
 model: this.model.withSettings(additionalSettings),
-
+promptTemplate: this.promptTemplate,
 });
 }
 }
-exports.
+exports.PromptTemplateImageGenerationModel = PromptTemplateImageGenerationModel;
package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts
@@ -0,0 +1,20 @@
+import { FunctionOptions } from "../../core/FunctionOptions.js";
+import { PromptTemplate } from "../PromptTemplate.js";
+import { ImageGenerationModel, ImageGenerationModelSettings } from "./ImageGenerationModel.js";
+export declare class PromptTemplateImageGenerationModel<PROMPT, MODEL_PROMPT, SETTINGS extends ImageGenerationModelSettings, MODEL extends ImageGenerationModel<MODEL_PROMPT, SETTINGS>> implements ImageGenerationModel<PROMPT, SETTINGS> {
+readonly model: MODEL;
+readonly promptTemplate: PromptTemplate<PROMPT, MODEL_PROMPT>;
+constructor({ model, promptTemplate, }: {
+model: MODEL;
+promptTemplate: PromptTemplate<PROMPT, MODEL_PROMPT>;
+});
+get modelInformation(): import("../ModelInformation.js").ModelInformation;
+get settings(): SETTINGS;
+doGenerateImage(prompt: PROMPT, options?: FunctionOptions): PromiseLike<{
+response: unknown;
+base64Image: string;
+}>;
+get settingsForEvent(): Partial<SETTINGS>;
+withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateImageGenerationModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
+withSettings(additionalSettings: Partial<SETTINGS>): this;
+}
package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js}
@@ -1,19 +1,19 @@
-export class
-constructor({ model,
+export class PromptTemplateImageGenerationModel {
+constructor({ model, promptTemplate, }) {
 Object.defineProperty(this, "model", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "promptTemplate", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
 this.model = model;
-this.
+this.promptTemplate = promptTemplate;
 }
 get modelInformation() {
 return this.model.modelInformation;
@@ -22,19 +22,19 @@ export class PromptFormatImageGenerationModel {
 return this.model.settings;
 }
 doGenerateImage(prompt, options) {
-const mappedPrompt = this.
+const mappedPrompt = this.promptTemplate.format(prompt);
 return this.model.doGenerateImage(mappedPrompt, options);
 }
 get settingsForEvent() {
 return this.model.settingsForEvent;
 }
-
-return new
+withPromptTemplate(promptTemplate) {
+return new PromptTemplateImageGenerationModel({ model: this, promptTemplate: promptTemplate });
 }
 withSettings(additionalSettings) {
-return new
+return new PromptTemplateImageGenerationModel({
 model: this.model.withSettings(additionalSettings),
-
+promptTemplate: this.promptTemplate,
 });
 }
 }
package/model-function/generate-structure/StructureFromTextGenerationModel.cjs
@@ -4,21 +4,21 @@ exports.StructureFromTextGenerationModel = void 0;
 const generateText_js_1 = require("../generate-text/generateText.cjs");
 const StructureParseError_js_1 = require("./StructureParseError.cjs");
 class StructureFromTextGenerationModel {
-constructor({ model,
+constructor({ model, template, }) {
 Object.defineProperty(this, "model", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "template", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
 this.model = model;
-this.
+this.template = template;
 }
 get modelInformation() {
 return this.model.modelInformation;
@@ -30,14 +30,14 @@ class StructureFromTextGenerationModel {
 return this.model.settingsForEvent;
 }
 async doGenerateStructure(schema, prompt, options) {
-const { response, value } = await (0, generateText_js_1.generateText)(this.model, this.
+const { response, value } = await (0, generateText_js_1.generateText)(this.model, this.template.createPrompt(prompt, schema), {
 ...options,
 returnType: "full",
 });
 try {
 return {
 response,
-value: this.
+value: this.template.extractStructure(value),
 valueText: value,
 };
 }
@@ -51,7 +51,7 @@ class StructureFromTextGenerationModel {
 withSettings(additionalSettings) {
 return new StructureFromTextGenerationModel({
 model: this.model.withSettings(additionalSettings),
-
+template: this.template,
 });
 }
 }
package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts
@@ -2,14 +2,14 @@ import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
 import { Schema } from "../../core/schema/Schema.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../generate-text/TextGenerationModel.js";
-import {
+import { StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
 import { StructureGenerationModel } from "./StructureGenerationModel.js";
 export declare class StructureFromTextGenerationModel<SOURCE_PROMPT, TARGET_PROMPT, MODEL extends TextGenerationModel<TARGET_PROMPT, TextGenerationModelSettings>> implements StructureGenerationModel<SOURCE_PROMPT, MODEL["settings"]> {
 protected readonly model: MODEL;
-protected readonly
-constructor({ model,
+protected readonly template: StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
+constructor({ model, template, }: {
 model: MODEL;
-
+template: StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
 });
 get modelInformation(): import("../ModelInformation.js").ModelInformation;
 get settings(): TextGenerationModelSettings;
package/model-function/generate-structure/StructureFromTextGenerationModel.js
@@ -1,21 +1,21 @@
 import { generateText } from "../generate-text/generateText.js";
 import { StructureParseError } from "./StructureParseError.js";
 export class StructureFromTextGenerationModel {
-constructor({ model,
+constructor({ model, template, }) {
 Object.defineProperty(this, "model", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "template", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
 this.model = model;
-this.
+this.template = template;
 }
 get modelInformation() {
 return this.model.modelInformation;
@@ -27,14 +27,14 @@ export class StructureFromTextGenerationModel {
 return this.model.settingsForEvent;
 }
 async doGenerateStructure(schema, prompt, options) {
-const { response, value } = await generateText(this.model, this.
+const { response, value } = await generateText(this.model, this.template.createPrompt(prompt, schema), {
 ...options,
 returnType: "full",
 });
 try {
 return {
 response,
-value: this.
+value: this.template.extractStructure(value),
 valueText: value,
 };
 }
@@ -48,7 +48,7 @@ export class StructureFromTextGenerationModel {
 withSettings(additionalSettings) {
 return new StructureFromTextGenerationModel({
 model: this.model.withSettings(additionalSettings),
-
+template: this.template,
 });
 }
 }
package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts}
@@ -1,6 +1,6 @@
 import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
 import { Schema } from "../../core/schema/Schema.js";
-export type
+export type StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT> = {
 createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT;
 extractStructure: (response: string) => unknown;
 };
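`StructureFromTextPromptTemplate` pairs a `createPrompt` function with an `extractStructure` parser; the `StructureFromTextGenerationModel` hunks above call both. A naive sketch matching the type (a real template would also use the `schema` argument and parse more defensively):

```ts
import type { StructureFromTextPromptTemplate } from "modelfusion";

// Naive JSON-based template: ask the model for JSON, then parse the whole response.
const naiveJsonTemplate: StructureFromTextPromptTemplate<string, string> = {
  createPrompt: (instruction) =>
    `${instruction}\n\nRespond with a single JSON object and nothing else.`,
  extractStructure: (response) => JSON.parse(response),
};
```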
package/model-function/generate-structure/StructureFromTextStreamingModel.cjs
@@ -12,7 +12,7 @@ class StructureFromTextStreamingModel extends StructureFromTextGenerationModel_j
 super(options);
 }
 async doStreamStructure(schema, prompt, options) {
-const textStream = await (0, streamText_js_1.streamText)(this.model, this.
+const textStream = await (0, streamText_js_1.streamText)(this.model, this.template.createPrompt(prompt, schema), options);
 const queue = new AsyncQueue_js_1.AsyncQueue();
 // run async on purpose:
 (async () => {
@@ -41,14 +41,14 @@ class StructureFromTextStreamingModel extends StructureFromTextGenerationModel_j
 return queue;
 }
 async doGenerateStructure(schema, prompt, options) {
-const { response, value } = await (0, generateText_js_1.generateText)(this.model, this.
+const { response, value } = await (0, generateText_js_1.generateText)(this.model, this.template.createPrompt(prompt, schema), {
 ...options,
 returnType: "full",
 });
 try {
 return {
 response,
-value: this.
+value: this.template.extractStructure(value),
 valueText: value,
 };
 }
@@ -62,7 +62,7 @@ class StructureFromTextStreamingModel extends StructureFromTextGenerationModel_j
 withSettings(additionalSettings) {
 return new StructureFromTextStreamingModel({
 model: this.model.withSettings(additionalSettings),
-
+template: this.template,
 });
 }
 }
package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts
@@ -5,12 +5,12 @@ import { Delta } from "../../model-function/Delta.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../generate-text/TextGenerationModel.js";
 import { StructureFromTextGenerationModel } from "./StructureFromTextGenerationModel.js";
-import {
+import { StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
 import { StructureStreamingModel } from "./StructureGenerationModel.js";
 export declare class StructureFromTextStreamingModel<SOURCE_PROMPT, TARGET_PROMPT, MODEL extends TextStreamingModel<TARGET_PROMPT, TextGenerationModelSettings>> extends StructureFromTextGenerationModel<SOURCE_PROMPT, TARGET_PROMPT, MODEL> implements StructureStreamingModel<SOURCE_PROMPT, MODEL["settings"]> {
 constructor(options: {
 model: MODEL;
-
+template: StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
 });
 doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: SOURCE_PROMPT, options?: FunctionOptions): Promise<AsyncQueue<Delta<unknown>>>;
 doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: SOURCE_PROMPT, options?: FunctionOptions): Promise<{
package/model-function/generate-structure/StructureFromTextStreamingModel.js
@@ -9,7 +9,7 @@ export class StructureFromTextStreamingModel extends StructureFromTextGeneration
 super(options);
 }
 async doStreamStructure(schema, prompt, options) {
-const textStream = await streamText(this.model, this.
+const textStream = await streamText(this.model, this.template.createPrompt(prompt, schema), options);
 const queue = new AsyncQueue();
 // run async on purpose:
 (async () => {
@@ -38,14 +38,14 @@ export class StructureFromTextStreamingModel extends StructureFromTextGeneration
 return queue;
 }
 async doGenerateStructure(schema, prompt, options) {
-const { response, value } = await generateText(this.model, this.
+const { response, value } = await generateText(this.model, this.template.createPrompt(prompt, schema), {
 ...options,
 returnType: "full",
 });
 try {
 return {
 response,
-value: this.
+value: this.template.extractStructure(value),
 valueText: value,
 };
 }
@@ -59,7 +59,7 @@ export class StructureFromTextStreamingModel extends StructureFromTextGeneration
 withSettings(additionalSettings) {
 return new StructureFromTextStreamingModel({
 model: this.model.withSettings(additionalSettings),
-
+template: this.template,
 });
 }
 }
package/model-function/generate-structure/index.cjs
@@ -15,7 +15,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 __exportStar(require("./StructureFromTextGenerationModel.cjs"), exports);
-__exportStar(require("./
+__exportStar(require("./StructureFromTextPromptTemplate.cjs"), exports);
 __exportStar(require("./StructureFromTextStreamingModel.cjs"), exports);
 __exportStar(require("./StructureGenerationEvent.cjs"), exports);
 __exportStar(require("./StructureGenerationModel.cjs"), exports);
package/model-function/generate-structure/index.d.ts
@@ -1,5 +1,5 @@
 export * from "./StructureFromTextGenerationModel.js";
-export * from "./
+export * from "./StructureFromTextPromptTemplate.js";
 export * from "./StructureFromTextStreamingModel.js";
 export * from "./StructureGenerationEvent.js";
 export * from "./StructureGenerationModel.js";
package/model-function/generate-structure/index.js
@@ -1,5 +1,5 @@
 export * from "./StructureFromTextGenerationModel.js";
-export * from "./
+export * from "./StructureFromTextPromptTemplate.js";
 export * from "./StructureFromTextStreamingModel.js";
 export * from "./StructureGenerationEvent.js";
 export * from "./StructureGenerationModel.js";
package/model-function/generate-structure/jsonStructurePrompt.d.ts
@@ -1,4 +1,4 @@
 import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
 import { Schema } from "../../core/schema/Schema.js";
-import {
-export declare function jsonStructurePrompt<SOURCE_PROMPT, TARGET_PROMPT>(createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT):
+import { StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
+export declare function jsonStructurePrompt<SOURCE_PROMPT, TARGET_PROMPT>(createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT): StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
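`jsonStructurePrompt` builds such a template from just the `createPrompt` half; per the return type above, the `extractStructure` part is supplied by the helper itself. A usage sketch, assuming the function is exported from the package root:

```ts
import { jsonStructurePrompt } from "modelfusion";

// The schema argument is available to createPrompt but ignored in this sketch.
const instructionToJson = jsonStructurePrompt<string, string>(
  (instruction) => `${instruction}\n\nReturn the answer as JSON.`
);
```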
package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs}
@@ -1,25 +1,25 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-const StructureFromTextGenerationModel_js_1 = require("../../model-function/generate-structure/StructureFromTextGenerationModel.cjs");
+exports.PromptTemplateTextGenerationModel = void 0;
 const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
 const TextGenerationToolCallsOrGenerateTextModel_js_1 = require("../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs");
-
-
+const StructureFromTextGenerationModel_js_1 = require("../generate-structure/StructureFromTextGenerationModel.cjs");
+class PromptTemplateTextGenerationModel {
+constructor({ model, promptTemplate, }) {
 Object.defineProperty(this, "model", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
-Object.defineProperty(this, "
+Object.defineProperty(this, "promptTemplate", {
 enumerable: true,
 configurable: true,
 writable: true,
 value: void 0
 });
 this.model = model;
-this.
+this.promptTemplate = promptTemplate;
 }
 get modelInformation() {
 return this.model.modelInformation;
@@ -38,49 +38,49 @@ class PromptFormatTextGenerationModel {
 if (originalCountPromptTokens === undefined) {
 return undefined;
 }
-return ((prompt) => originalCountPromptTokens(this.
+return ((prompt) => originalCountPromptTokens(this.promptTemplate.format(prompt)));
 }
 doGenerateText(prompt, options) {
-const mappedPrompt = this.
+const mappedPrompt = this.promptTemplate.format(prompt);
 return this.model.doGenerateText(mappedPrompt, options);
 }
 get settingsForEvent() {
 return this.model.settingsForEvent;
 }
-asToolCallGenerationModel(
+asToolCallGenerationModel(promptTemplate) {
 return new TextGenerationToolCallModel_js_1.TextGenerationToolCallModel({
 model: this,
-format:
+format: promptTemplate,
 });
 }
-asToolCallsOrTextGenerationModel(
+asToolCallsOrTextGenerationModel(promptTemplate) {
 return new TextGenerationToolCallsOrGenerateTextModel_js_1.TextGenerationToolCallsOrGenerateTextModel({
 model: this,
-
+template: promptTemplate,
 });
 }
-asStructureGenerationModel(
+asStructureGenerationModel(promptTemplate) {
 return new StructureFromTextGenerationModel_js_1.StructureFromTextGenerationModel({
 model: this,
-
+template: promptTemplate,
 });
 }
-
-return new
+withPromptTemplate(promptTemplate) {
+return new PromptTemplateTextGenerationModel({
 model: this.withSettings({
 stopSequences: [
 ...(this.settings.stopSequences ?? []),
-...
+...promptTemplate.stopSequences,
 ],
 }),
-
+promptTemplate,
 });
 }
 withSettings(additionalSettings) {
-return new
+return new PromptTemplateTextGenerationModel({
 model: this.model.withSettings(additionalSettings),
-
+promptTemplate: this.promptTemplate,
 });
 }
 }
-exports.
+exports.PromptTemplateTextGenerationModel = PromptTemplateTextGenerationModel;
package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts
@@ -0,0 +1,35 @@
+import { FunctionOptions } from "../../core/FunctionOptions.js";
+import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { StructureFromTextGenerationModel } from "../generate-structure/StructureFromTextGenerationModel.js";
+import { StructureFromTextPromptTemplate } from "../generate-structure/StructureFromTextPromptTemplate.js";
+import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";
+import { TextGenerationPromptTemplate } from "./TextGenerationPromptTemplate.js";
+export declare class PromptTemplateTextGenerationModel<PROMPT, MODEL_PROMPT, SETTINGS extends TextGenerationModelSettings, MODEL extends TextGenerationModel<MODEL_PROMPT, SETTINGS>> implements TextGenerationModel<PROMPT, SETTINGS> {
+readonly model: MODEL;
+readonly promptTemplate: TextGenerationPromptTemplate<PROMPT, MODEL_PROMPT>;
+constructor({ model, promptTemplate, }: {
+model: MODEL;
+promptTemplate: TextGenerationPromptTemplate<PROMPT, MODEL_PROMPT>;
+});
+get modelInformation(): import("../ModelInformation.js").ModelInformation;
+get settings(): SETTINGS;
+get tokenizer(): MODEL["tokenizer"];
+get contextWindowSize(): MODEL["contextWindowSize"];
+get countPromptTokens(): MODEL["countPromptTokens"] extends undefined ? undefined : (prompt: PROMPT) => PromiseLike<number>;
+doGenerateText(prompt: PROMPT, options?: FunctionOptions): PromiseLike<{
+response: unknown;
+text: string;
+usage?: {
+promptTokens: number;
+completionTokens: number;
+totalTokens: number;
+} | undefined;
+}>;
+get settingsForEvent(): Partial<SETTINGS>;
+asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationToolCallModel<INPUT_PROMPT, PROMPT, this>;
+asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsOrGenerateTextPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, PROMPT, this>;
+asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, PROMPT>): StructureFromTextGenerationModel<INPUT_PROMPT, PROMPT, this>;
+withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateTextGenerationModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
+withSettings(additionalSettings: Partial<SETTINGS>): this;
+}
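`PromptTemplateTextGenerationModel` is the wrapper that `withPromptTemplate` returns; as the `.cjs` hunk shows, it forwards `format` and merges the template's `stopSequences` into the wrapped model's settings. A sketch of the `TextGenerationPromptTemplate` shape it consumes (the markers used here are illustrative, not taken from the package):

```ts
import type { TextGenerationPromptTemplate } from "modelfusion";

// format() maps the caller-facing prompt to the model prompt; stopSequences is merged
// into the wrapped model's stopSequences by withPromptTemplate (see the .cjs hunk above).
const qaTemplate: TextGenerationPromptTemplate<string, string> = {
  format: (question) => `Question: ${question}\nAnswer:`,
  stopSequences: ["\nQuestion:"],
};
```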