modelfusion 0.45.3 → 0.47.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +92 -65
- package/index.cjs +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
- package/model-function/ModelFunctionPromise.cjs +37 -0
- package/model-function/ModelFunctionPromise.d.ts +18 -0
- package/model-function/ModelFunctionPromise.js +33 -0
- package/{prompt → model-function}/PromptFormat.d.ts +0 -5
- package/model-function/describe-image/describeImage.cjs +3 -2
- package/model-function/describe-image/describeImage.d.ts +1 -1
- package/model-function/describe-image/describeImage.js +3 -2
- package/model-function/embed/embed.cjs +5 -4
- package/model-function/embed/embed.d.ts +1 -1
- package/model-function/embed/embed.js +5 -4
- package/model-function/executeCall.cjs +3 -46
- package/model-function/executeCall.d.ts +5 -18
- package/model-function/executeCall.js +1 -43
- package/model-function/generate-image/ImageGenerationModel.d.ts +2 -0
- package/model-function/generate-image/ImageGenerationPromise.cjs +50 -0
- package/model-function/generate-image/ImageGenerationPromise.d.ts +22 -0
- package/model-function/generate-image/ImageGenerationPromise.js +46 -0
- package/model-function/generate-image/PromptFormatImageGenerationModel.cjs +44 -0
- package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +20 -0
- package/model-function/generate-image/PromptFormatImageGenerationModel.js +40 -0
- package/model-function/generate-image/generateImage.cjs +3 -2
- package/model-function/generate-image/generateImage.d.ts +2 -2
- package/model-function/generate-image/generateImage.js +3 -2
- package/model-function/generate-structure/generateStructure.cjs +3 -2
- package/model-function/generate-structure/generateStructure.d.ts +1 -1
- package/model-function/generate-structure/generateStructure.js +3 -2
- package/model-function/generate-structure/generateStructureOrText.cjs +3 -2
- package/model-function/generate-structure/generateStructureOrText.d.ts +1 -1
- package/model-function/generate-structure/generateStructureOrText.js +3 -2
- package/{prompt → model-function/generate-text}/AlpacaPromptFormat.d.ts +2 -2
- package/{prompt → model-function/generate-text}/Llama2PromptFormat.cjs +1 -1
- package/model-function/generate-text/Llama2PromptFormat.d.ts +13 -0
- package/{prompt → model-function/generate-text}/Llama2PromptFormat.js +1 -1
- package/{prompt → model-function/generate-text}/PromptFormatTextGenerationModel.d.ts +7 -7
- package/{prompt → model-function/generate-text}/PromptFormatTextStreamingModel.d.ts +6 -6
- package/model-function/generate-text/TextGenerationModel.d.ts +10 -3
- package/model-function/generate-text/TextGenerationPromptFormat.cjs +2 -0
- package/model-function/generate-text/TextGenerationPromptFormat.d.ts +11 -0
- package/model-function/generate-text/TextGenerationPromptFormat.js +1 -0
- package/{prompt → model-function/generate-text}/TextPromptFormat.cjs +2 -2
- package/model-function/generate-text/TextPromptFormat.d.ts +17 -0
- package/{prompt → model-function/generate-text}/TextPromptFormat.js +2 -2
- package/{prompt → model-function/generate-text}/VicunaPromptFormat.cjs +1 -1
- package/{prompt → model-function/generate-text}/VicunaPromptFormat.d.ts +3 -3
- package/{prompt → model-function/generate-text}/VicunaPromptFormat.js +1 -1
- package/model-function/generate-text/generateText.cjs +6 -3
- package/model-function/generate-text/generateText.d.ts +1 -1
- package/model-function/generate-text/generateText.js +6 -3
- package/{prompt → model-function/generate-text}/index.cjs +9 -4
- package/model-function/generate-text/index.d.ts +16 -0
- package/model-function/generate-text/index.js +16 -0
- package/{prompt/chat → model-function/generate-text}/trimChatPrompt.d.ts +2 -5
- package/model-function/index.cjs +3 -5
- package/model-function/index.d.ts +3 -5
- package/model-function/index.js +3 -5
- package/model-function/synthesize-speech/synthesizeSpeech.cjs +3 -2
- package/model-function/synthesize-speech/synthesizeSpeech.d.ts +1 -1
- package/model-function/synthesize-speech/synthesizeSpeech.js +3 -2
- package/model-function/transcribe-speech/transcribe.cjs +3 -2
- package/model-function/transcribe-speech/transcribe.d.ts +1 -1
- package/model-function/transcribe-speech/transcribe.js +3 -2
- package/model-provider/anthropic/AnthropicPromptFormat.cjs +1 -1
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +5 -5
- package/model-provider/anthropic/AnthropicPromptFormat.js +1 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +14 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -3
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +14 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +11 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +12 -12
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +11 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.cjs +12 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +10 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.js +8 -0
- package/model-provider/automatic1111/index.cjs +1 -0
- package/model-provider/automatic1111/index.d.ts +1 -0
- package/model-provider/automatic1111/index.js +1 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +14 -1
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +14 -3
- package/model-provider/cohere/CohereTextGenerationModel.js +14 -1
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +2 -2
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +5 -5
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +2 -2
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +1 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +3 -3
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +1 -1
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +8 -1
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +6 -3
- package/model-provider/openai/OpenAIImageGenerationModel.js +8 -1
- package/model-provider/openai/OpenAITextGenerationModel.cjs +14 -1
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +14 -3
- package/model-provider/openai/OpenAITextGenerationModel.js +14 -1
- package/model-provider/openai/chat/OpenAIChatModel.cjs +14 -1
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +11 -3
- package/model-provider/openai/chat/OpenAIChatModel.js +14 -1
- package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +1 -1
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +5 -5
- package/model-provider/openai/chat/OpenAIChatPromptFormat.js +1 -1
- package/model-provider/stability/StabilityImageGenerationModel.cjs +11 -0
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +15 -14
- package/model-provider/stability/StabilityImageGenerationModel.js +11 -0
- package/model-provider/stability/StabilityImageGenerationPrompt.cjs +12 -0
- package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +9 -0
- package/model-provider/stability/StabilityImageGenerationPrompt.js +8 -0
- package/model-provider/stability/index.cjs +1 -0
- package/model-provider/stability/index.d.ts +1 -0
- package/model-provider/stability/index.js +1 -0
- package/package.json +1 -1
- package/prompt/Llama2PromptFormat.d.ts +0 -13
- package/prompt/TextPromptFormat.d.ts +0 -17
- package/prompt/index.d.ts +0 -11
- package/prompt/index.js +0 -11
- /package/{prompt → model-function}/PromptFormat.cjs +0 -0
- /package/{prompt → model-function}/PromptFormat.js +0 -0
- /package/{prompt → model-function/generate-text}/AlpacaPromptFormat.cjs +0 -0
- /package/{prompt → model-function/generate-text}/AlpacaPromptFormat.js +0 -0
- /package/{prompt/chat → model-function/generate-text}/ChatPrompt.cjs +0 -0
- /package/{prompt/chat → model-function/generate-text}/ChatPrompt.d.ts +0 -0
- /package/{prompt/chat → model-function/generate-text}/ChatPrompt.js +0 -0
- /package/{prompt → model-function/generate-text}/InstructionPrompt.cjs +0 -0
- /package/{prompt → model-function/generate-text}/InstructionPrompt.d.ts +0 -0
- /package/{prompt → model-function/generate-text}/InstructionPrompt.js +0 -0
- /package/{prompt → model-function/generate-text}/PromptFormatTextGenerationModel.cjs +0 -0
- /package/{prompt → model-function/generate-text}/PromptFormatTextGenerationModel.js +0 -0
- /package/{prompt → model-function/generate-text}/PromptFormatTextStreamingModel.cjs +0 -0
- /package/{prompt → model-function/generate-text}/PromptFormatTextStreamingModel.js +0 -0
- /package/{prompt/chat → model-function/generate-text}/trimChatPrompt.cjs +0 -0
- /package/{prompt/chat → model-function/generate-text}/trimChatPrompt.js +0 -0
- /package/{prompt/chat → model-function/generate-text}/validateChatPrompt.cjs +0 -0
- /package/{prompt/chat → model-function/generate-text}/validateChatPrompt.d.ts +0 -0
- /package/{prompt/chat → model-function/generate-text}/validateChatPrompt.js +0 -0
package/README.md
CHANGED
@@ -35,7 +35,7 @@ Or use a template: [ModelFusion terminal app starter](https://github.com/lgramme
 
 You can provide API keys for the different [integrations](https://modelfusion.dev/integration/model-provider/) using environment variables (e.g., `OPENAI_API_KEY`) or pass them into the model constructors as options.
 
-### [Generate Text](https://modelfusion.dev/guide/function/generate-text)
+### [Generate and Stream Text](https://modelfusion.dev/guide/function/generate-text)
 
 Generate text using a language model and a prompt.
 You can stream the text if it is supported by the model.
@@ -71,70 +71,11 @@ for await (const textFragment of textStream) {
 
 Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Anthropic](https://modelfusion.dev/integration/model-provider/anthropic), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp)
 
-
+### [Generate and Stream Structure](https://modelfusion.dev/guide/function/generate-structure#generatestructure)
 
-
+Generate typed objects using a language model and a schema.
 
-
-const text = await generateText(
-  new LlamaCppTextGenerationModel({
-    contextWindowSize: 4096, // Llama 2 context window size
-    maxCompletionTokens: 1000,
-  }).withPromptFormat(mapInstructionPromptToLlama2Format()),
-  {
-    system: "You are a story writer.",
-    instruction: "Write a short story about a robot learning to love.",
-  }
-);
-```
-
-```ts
-const textStream = await streamText(
-  new OpenAIChatModel({
-    model: "gpt-3.5-turbo",
-  }).withPromptFormat(mapChatPromptToOpenAIChatFormat()),
-  [
-    { system: "You are a celebrated poet." },
-    { user: "Write a short story about a robot learning to love." },
-    { ai: "Once upon a time, there was a robot who learned to love." },
-    { user: "That's a great start!" },
-  ]
-);
-```
-
-| Prompt Format | Instruction Prompt | Chat Prompt |
-| ------------- | ------------------ | ----------- |
-| OpenAI Chat | ✅ | ✅ |
-| Anthropic | ✅ | ✅ |
-| Llama 2 | ✅ | ✅ |
-| Alpaca | ✅ | ❌ |
-| Vicuna | ❌ | ✅ |
-| Generic Text | ✅ | ✅ |
-
-#### Metadata and original responses
-
-ModelFusion model functions return rich results that include the original response and metadata when you call `.asFullResponse()` before resolving the promise.
-
-```ts
-// access the full response (needs to be typed) and the metadata:
-const { value, response, metadata } = await generateText(
-  new OpenAITextGenerationModel({
-    model: "gpt-3.5-turbo-instruct",
-    maxCompletionTokens: 1000,
-    n: 2, // generate 2 completions
-  }),
-  "Write a short story about a robot learning to love:\n\n"
-).asFullResponse();
-
-console.log(metadata);
-
-// cast to the response type:
-for (const choice of (response as OpenAITextGenerationResponse).choices) {
-  console.log(choice.text);
-}
-```
-
-### [Generate Structure](https://modelfusion.dev/guide/function/generate-structure#generatestructure)
+#### generateStructure
 
 Generate a structure that matches a schema.
 
@@ -169,7 +110,7 @@ const sentiment = await generateStructure(
 
 Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai)
 
-
+#### streamStructure
 
 Stream a structure that matches a schema. Partial structures before the final part are untyped JSON.
 
@@ -367,7 +308,7 @@ Providers: [HuggingFace](/integration/model-provider/huggingface)
 
 ### [Generate Image](https://modelfusion.dev/guide/function/generate-image)
 
-Generate
+Generate an image from a prompt.
 
 ```ts
 const image = await generateImage(
@@ -490,6 +431,91 @@ const result = await guard(
 );
 ```
 
+### Prompt Formats
+
+Prompt formats let you use higher level prompt structures (such as instruction or chat prompts) for different models.
+
+#### [Text Generation Prompt Formats](https://modelfusion.dev/guide/function/generate-text/prompt-format)
+
+```ts
+const text = await generateText(
+  new LlamaCppTextGenerationModel({
+    contextWindowSize: 4096, // Llama 2 context window size
+    maxCompletionTokens: 1000,
+  }).withPromptFormat(mapInstructionPromptToLlama2Format()),
+  {
+    system: "You are a story writer.",
+    instruction: "Write a short story about a robot learning to love.",
+  }
+);
+```
+
+They can also be accessed through the shorthand methods `.withChatPrompt()` and `.withInstructionPrompt()` for many models:
+
+```ts
+const textStream = await streamText(
+  new OpenAIChatModel({
+    model: "gpt-3.5-turbo",
+  }).withChatPrompt(),
+  [
+    { system: "You are a celebrated poet." },
+    { user: "Write a short story about a robot learning to love." },
+    { ai: "Once upon a time, there was a robot who learned to love." },
+    { user: "That's a great start!" },
+  ]
+);
+```
+
+| Prompt Format | Instruction Prompt | Chat Prompt |
+| ------------- | ------------------ | ----------- |
+| OpenAI Chat | ✅ | ✅ |
+| Anthropic | ✅ | ✅ |
+| Llama 2 | ✅ | ✅ |
+| Alpaca | ✅ | ❌ |
+| Vicuna | ❌ | ✅ |
+| Generic Text | ✅ | ✅ |
+
+#### [Image Generation Prompt Formats](https://modelfusion.dev/guide/function/generate-image/prompt-format)
+
+You an use prompt formats with image models as well, e.g. to use a basic text prompt. It is available as a shorthand method:
+
+```ts
+const image = await generateImage(
+  new StabilityImageGenerationModel({
+    //...
+  }).withBasicPrompt(),
+  "the wicked witch of the west in the style of early 19th century painting"
+);
+```
+
+| Prompt Format | Basic Text Prompt |
+| ------------- | ----------------- |
+| Automatic1111 | ✅ |
+| Stability | ✅ |
+
+### Metadata and original responses
+
+ModelFusion model functions return rich results that include the original response and metadata when you call `.asFullResponse()` before resolving the promise.
+
+```ts
+// access the full response (needs to be typed) and the metadata:
+const { value, response, metadata } = await generateText(
+  new OpenAITextGenerationModel({
+    model: "gpt-3.5-turbo-instruct",
+    maxCompletionTokens: 1000,
+    n: 2, // generate 2 completions
+  }),
+  "Write a short story about a robot learning to love:\n\n"
+).asFullResponse();
+
+console.log(metadata);
+
+// cast to the response type:
+for (const choice of (response as OpenAITextGenerationResponse).choices) {
+  console.log(choice.text);
+}
+```
+
 ### Observability
 
 Integrations: [Helicone](https://modelfusion.dev/integration/observability/helicone)
@@ -509,6 +535,7 @@ Integrations: [Helicone](https://modelfusion.dev/integration/observability/helic
 - [Synthesize Speech](https://modelfusion.dev/guide/function/synthesize-speech)
 - [Describe Image](https://modelfusion.dev/guide/function/describe-image)
 - [Generate Image](https://modelfusion.dev/guide/function/generate-image)
+  - [Prompt Format](https://modelfusion.dev/guide/function/generate-image/prompt-format)
 - [Tools](https://modelfusion.dev/guide/tools)
 - [Vector Indices](https://modelfusion.dev/guide/vector-index)
   - [Upsert](https://modelfusion.dev/guide/vector-index/upsert)
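For readers of the README changes above: a minimal, illustrative sketch of the new `.withInstructionPrompt()` shorthand, using the instruction-prompt shape shown in the README examples. The model choice and prompt text here are placeholders, not taken from the package.

```ts
import { OpenAIChatModel, streamText } from "modelfusion";

// Instruction prompt mapped onto OpenAI chat messages via the new shorthand,
// instead of passing an explicit prompt format to .withPromptFormat().
const textStream = await streamText(
  new OpenAIChatModel({ model: "gpt-3.5-turbo" }).withInstructionPrompt(),
  {
    system: "You are a story writer.",
    instruction: "Write a short story about a robot learning to love.",
  }
);

for await (const textFragment of textStream) {
  process.stdout.write(textFragment);
}
```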
package/index.cjs
CHANGED
@@ -22,7 +22,6 @@ __exportStar(require("./guard/index.cjs"), exports);
 __exportStar(require("./model-function/index.cjs"), exports);
 __exportStar(require("./model-provider/index.cjs"), exports);
 __exportStar(require("./observability/index.cjs"), exports);
-__exportStar(require("./prompt/index.cjs"), exports);
 __exportStar(require("./retriever/index.cjs"), exports);
 __exportStar(require("./text-chunk/index.cjs"), exports);
 __exportStar(require("./tool/index.cjs"), exports);
package/index.d.ts
CHANGED
@@ -6,7 +6,6 @@ export * from "./guard/index.js";
 export * from "./model-function/index.js";
 export * from "./model-provider/index.js";
 export * from "./observability/index.js";
-export * from "./prompt/index.js";
 export * from "./retriever/index.js";
 export * from "./text-chunk/index.js";
 export * from "./tool/index.js";
package/index.js
CHANGED
@@ -6,7 +6,6 @@ export * from "./guard/index.js";
 export * from "./model-function/index.js";
 export * from "./model-provider/index.js";
 export * from "./observability/index.js";
-export * from "./prompt/index.js";
 export * from "./retriever/index.js";
 export * from "./text-chunk/index.js";
 export * from "./tool/index.js";
package/model-function/ModelFunctionPromise.cjs
ADDED
@@ -0,0 +1,37 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ModelFunctionPromise = void 0;
+class ModelFunctionPromise extends Promise {
+    constructor(fullPromise) {
+        super((resolve) => {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            resolve(null); // we override the resolve function
+        });
+        Object.defineProperty(this, "fullPromise", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: fullPromise
+        });
+        Object.defineProperty(this, "valuePromise", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.valuePromise = fullPromise.then((result) => result.value);
+    }
+    asFullResponse() {
+        return this.fullPromise;
+    }
+    then(onfulfilled, onrejected) {
+        return this.valuePromise.then(onfulfilled, onrejected);
+    }
+    catch(onrejected) {
+        return this.valuePromise.catch(onrejected);
+    }
+    finally(onfinally) {
+        return this.valuePromise.finally(onfinally);
+    }
+}
+exports.ModelFunctionPromise = ModelFunctionPromise;
package/model-function/ModelFunctionPromise.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { ModelCallMetadata } from "./ModelCallMetadata.js";
+export declare class ModelFunctionPromise<VALUE> extends Promise<VALUE> {
+    private fullPromise;
+    private valuePromise;
+    constructor(fullPromise: Promise<{
+        value: VALUE;
+        response: unknown;
+        metadata: ModelCallMetadata;
+    }>);
+    asFullResponse(): Promise<{
+        value: VALUE;
+        response: unknown;
+        metadata: ModelCallMetadata;
+    }>;
+    then<TResult1 = VALUE, TResult2 = never>(onfulfilled?: ((value: VALUE) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | undefined | null): Promise<TResult1 | TResult2>;
+    catch<TResult = never>(onrejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | undefined | null): Promise<VALUE | TResult>;
+    finally(onfinally?: (() => void) | undefined | null): Promise<VALUE>;
+}
package/model-function/ModelFunctionPromise.js
ADDED
@@ -0,0 +1,33 @@
+export class ModelFunctionPromise extends Promise {
+    constructor(fullPromise) {
+        super((resolve) => {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            resolve(null); // we override the resolve function
+        });
+        Object.defineProperty(this, "fullPromise", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: fullPromise
+        });
+        Object.defineProperty(this, "valuePromise", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.valuePromise = fullPromise.then((result) => result.value);
+    }
+    asFullResponse() {
+        return this.fullPromise;
+    }
+    then(onfulfilled, onrejected) {
+        return this.valuePromise.then(onfulfilled, onrejected);
+    }
+    catch(onrejected) {
+        return this.valuePromise.catch(onrejected);
+    }
+    finally(onfinally) {
+        return this.valuePromise.finally(onfinally);
+    }
+}
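A brief, illustrative sketch of how the `ModelFunctionPromise` added above behaves from the caller's side, based on the `then()`/`asFullResponse()` implementation shown in the diff: awaiting it resolves to the extracted value, while `.asFullResponse()` resolves to the full `{ value, response, metadata }` object. The model and prompt below are placeholders.

```ts
import { generateText, OpenAITextGenerationModel } from "modelfusion";

const model = new OpenAITextGenerationModel({ model: "gpt-3.5-turbo-instruct" });

// Plain await: resolves to the extracted value (the generated text).
const text = await generateText(model, "Write a haiku about version bumps:\n\n");

// .asFullResponse(): resolves to { value, response, metadata } instead.
const { value, response, metadata } = await generateText(
  model,
  "Write a haiku about version bumps:\n\n"
).asFullResponse();
```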
package/{prompt → model-function}/PromptFormat.d.ts
CHANGED
@@ -6,9 +6,4 @@ export interface PromptFormat<SOURCE_PROMPT, TARGET_PROMPT> {
      * Formats the source prompt into the structure of the target prompt.
      */
     format(sourcePrompt: SOURCE_PROMPT): TARGET_PROMPT;
-    /**
-     * The texts that should be used as default stop sequences.
-     * This is e.g. important for chat formats.
-     */
-    stopSequences: string[];
 }
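With `stopSequences` removed from the base interface, a `PromptFormat` now only needs `format()`. A minimal sketch of a custom format under that interface, assuming `PromptFormat` and `InstructionPrompt` remain exported from the package root (the types themselves are assumptions for illustration):

```ts
import type { InstructionPrompt, PromptFormat } from "modelfusion";

// Maps a plain string onto an instruction prompt; no stopSequences required.
const textToInstructionPrompt: PromptFormat<string, InstructionPrompt> = {
  format: (prompt) => ({ instruction: prompt }),
};
```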
package/model-function/describe-image/describeImage.cjs
CHANGED
@@ -2,13 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.describeImage = void 0;
 const executeCall_js_1 = require("../executeCall.cjs");
+const ModelFunctionPromise_js_1 = require("../ModelFunctionPromise.cjs");
 /**
  * Describe an image as text.
  *
  * Depending on the model, this can be used for image captioning, for describing the contents of an image, or for OCR.
  */
 function describeImage(model, data, options) {
-    return (0, executeCall_js_1.executeCall)({
+    return new ModelFunctionPromise_js_1.ModelFunctionPromise((0, executeCall_js_1.executeCall)({
         functionType: "image-description",
         input: data,
         model,
@@ -20,6 +21,6 @@ function describeImage(model, data, options) {
             extractedValue: result.description,
         };
     },
-    });
+    }));
 }
 exports.describeImage = describeImage;
package/model-function/describe-image/describeImage.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
-import { ModelFunctionPromise } from "../
+import { ModelFunctionPromise } from "../ModelFunctionPromise.js";
 import { ImageDescriptionModel, ImageDescriptionModelSettings } from "./ImageDescriptionModel.js";
 /**
  * Describe an image as text.
package/model-function/describe-image/describeImage.js
CHANGED
@@ -1,11 +1,12 @@
 import { executeCall } from "../executeCall.js";
+import { ModelFunctionPromise } from "../ModelFunctionPromise.js";
 /**
  * Describe an image as text.
  *
  * Depending on the model, this can be used for image captioning, for describing the contents of an image, or for OCR.
  */
 export function describeImage(model, data, options) {
-    return executeCall({
+    return new ModelFunctionPromise(executeCall({
         functionType: "image-description",
         input: data,
         model,
@@ -17,5 +18,5 @@ export function describeImage(model, data, options) {
             extractedValue: result.description,
         };
     },
-    });
+    }));
 }
package/model-function/embed/embed.cjs
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.embed = exports.embedMany = void 0;
 const executeCall_js_1 = require("../executeCall.cjs");
+const ModelFunctionPromise_js_1 = require("../ModelFunctionPromise.cjs");
 /**
  * Generate embeddings for multiple values.
  *
@@ -15,7 +16,7 @@ const executeCall_js_1 = require("../executeCall.cjs");
  * );
  */
 function embedMany(model, values, options) {
-    return (0, executeCall_js_1.executeCall)({
+    return new ModelFunctionPromise_js_1.ModelFunctionPromise((0, executeCall_js_1.executeCall)({
         functionType: "embedding",
         input: values,
         model,
@@ -43,7 +44,7 @@ function embedMany(model, values, options) {
             extractedValue: embeddings,
         };
     },
-    });
+    }));
 }
 exports.embedMany = embedMany;
 /**
@@ -56,7 +57,7 @@ exports.embedMany = embedMany;
  * );
  */
 function embed(model, value, options) {
-    return (0, executeCall_js_1.executeCall)({
+    return new ModelFunctionPromise_js_1.ModelFunctionPromise((0, executeCall_js_1.executeCall)({
         functionType: "embedding",
         input: value,
         model,
@@ -68,6 +69,6 @@ function embed(model, value, options) {
             extractedValue: result.embeddings[0],
         };
     },
-    });
+    }));
 }
 exports.embed = embed;
package/model-function/embed/embed.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { Vector } from "../../core/Vector.js";
-import { ModelFunctionPromise } from "../
+import { ModelFunctionPromise } from "../ModelFunctionPromise.js";
 import { EmbeddingModel, EmbeddingModelSettings } from "./EmbeddingModel.js";
 /**
  * Generate embeddings for multiple values.
package/model-function/embed/embed.js
CHANGED
@@ -1,4 +1,5 @@
 import { executeCall } from "../executeCall.js";
+import { ModelFunctionPromise } from "../ModelFunctionPromise.js";
 /**
  * Generate embeddings for multiple values.
  *
@@ -12,7 +13,7 @@ import { executeCall } from "../executeCall.js";
  * );
  */
 export function embedMany(model, values, options) {
-    return executeCall({
+    return new ModelFunctionPromise(executeCall({
         functionType: "embedding",
         input: values,
         model,
@@ -40,7 +41,7 @@ export function embedMany(model, values, options) {
             extractedValue: embeddings,
         };
     },
-    });
+    }));
 }
 /**
  * Generate an embedding for a single value.
@@ -52,7 +53,7 @@ export function embedMany(model, values, options) {
  * );
  */
 export function embed(model, value, options) {
-    return executeCall({
+    return new ModelFunctionPromise(executeCall({
         functionType: "embedding",
         input: value,
         model,
@@ -64,5 +65,5 @@ export function embed(model, value, options) {
             extractedValue: result.embeddings[0],
         };
     },
-    });
+    }));
}
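Usage is unchanged for callers of `embed`/`embedMany`; the functions now just wrap `executeCall` in a `ModelFunctionPromise`. A short sketch, with the embedding model class and model id assumed for illustration:

```ts
import { embed, embedMany, OpenAITextEmbeddingModel } from "modelfusion";

const embeddingModel = new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" });

// Single value -> one vector; multiple values -> an array of vectors.
const vector = await embed(embeddingModel, "a sunny day at the beach");
const vectors = await embedMany(embeddingModel, [
  "a sunny day at the beach",
  "a rainy afternoon in the city",
]);
```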
package/model-function/executeCall.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.executeCall =
+exports.executeCall = void 0;
 const nanoid_1 = require("nanoid");
 const FunctionEventSource_js_1 = require("../core/FunctionEventSource.cjs");
 const GlobalFunctionLogging_js_1 = require("../core/GlobalFunctionLogging.cjs");
@@ -10,51 +10,7 @@ const getFunctionCallLogger_js_1 = require("../core/getFunctionCallLogger.cjs");
 const getRun_js_1 = require("../core/getRun.cjs");
 const DurationMeasurement_js_1 = require("../util/DurationMeasurement.cjs");
 const runSafe_js_1 = require("../util/runSafe.cjs");
-
-    constructor(fullPromise) {
-        super((resolve) => {
-            // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            resolve(null); // we override the resolve function
-        });
-        Object.defineProperty(this, "fullPromise", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: fullPromise
-        });
-        Object.defineProperty(this, "valuePromise", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.valuePromise = fullPromise.then((result) => result.value);
-    }
-    asFullResponse() {
-        return this.fullPromise;
-    }
-    then(onfulfilled, onrejected) {
-        return this.valuePromise.then(onfulfilled, onrejected);
-    }
-    catch(onrejected) {
-        return this.valuePromise.catch(onrejected);
-    }
-    finally(onfinally) {
-        return this.valuePromise.finally(onfinally);
-    }
-}
-exports.ModelFunctionPromise = ModelFunctionPromise;
-function executeCall({ model, options, input, functionType, generateResponse, }) {
-    return new ModelFunctionPromise(doExecuteCall({
-        model,
-        options,
-        input,
-        functionType,
-        generateResponse,
-    }));
-}
-exports.executeCall = executeCall;
-async function doExecuteCall({ model, options, input, functionType, generateResponse, }) {
+async function executeCall({ model, options, input, functionType, generateResponse, }) {
     const run = await (0, getRun_js_1.getRun)(options?.run);
     const settings = model.settings;
     const eventSource = new FunctionEventSource_js_1.FunctionEventSource({
@@ -148,3 +104,4 @@ async function doExecuteCall({ model, options, input, functionType, generateResp
         },
     };
 }
+exports.executeCall = executeCall;
package/model-function/executeCall.d.ts
CHANGED
@@ -2,23 +2,6 @@ import { FunctionOptions } from "../core/FunctionOptions.js";
 import { Model, ModelSettings } from "./Model.js";
 import { ModelCallStartedEvent } from "./ModelCallEvent.js";
 import { ModelCallMetadata } from "./ModelCallMetadata.js";
-export declare class ModelFunctionPromise<VALUE> extends Promise<VALUE> {
-    private fullPromise;
-    private valuePromise;
-    constructor(fullPromise: Promise<{
-        value: VALUE;
-        response: unknown;
-        metadata: ModelCallMetadata;
-    }>);
-    asFullResponse(): Promise<{
-        value: VALUE;
-        response: unknown;
-        metadata: ModelCallMetadata;
-    }>;
-    then<TResult1 = VALUE, TResult2 = never>(onfulfilled?: ((value: VALUE) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | undefined | null): Promise<TResult1 | TResult2>;
-    catch<TResult = never>(onrejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | undefined | null): Promise<VALUE | TResult>;
-    finally(onfinally?: (() => void) | undefined | null): Promise<VALUE>;
-}
 export declare function executeCall<VALUE, MODEL extends Model<ModelSettings>>({ model, options, input, functionType, generateResponse, }: {
     model: MODEL;
     options?: FunctionOptions;
@@ -29,4 +12,8 @@ export declare function executeCall<VALUE, MODEL extends Model<ModelSettings>>({
         extractedValue: VALUE;
         usage?: unknown;
     }>;
-}):
+}): Promise<{
+    value: VALUE;
+    response: unknown;
+    metadata: ModelCallMetadata;
+}>;
package/model-function/executeCall.js
CHANGED
@@ -7,49 +7,7 @@ import { getFunctionCallLogger } from "../core/getFunctionCallLogger.js";
 import { getRun } from "../core/getRun.js";
 import { startDurationMeasurement } from "../util/DurationMeasurement.js";
 import { runSafe } from "../util/runSafe.js";
-export
-    constructor(fullPromise) {
-        super((resolve) => {
-            // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            resolve(null); // we override the resolve function
-        });
-        Object.defineProperty(this, "fullPromise", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: fullPromise
-        });
-        Object.defineProperty(this, "valuePromise", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.valuePromise = fullPromise.then((result) => result.value);
-    }
-    asFullResponse() {
-        return this.fullPromise;
-    }
-    then(onfulfilled, onrejected) {
-        return this.valuePromise.then(onfulfilled, onrejected);
-    }
-    catch(onrejected) {
-        return this.valuePromise.catch(onrejected);
-    }
-    finally(onfinally) {
-        return this.valuePromise.finally(onfinally);
-    }
-}
-export function executeCall({ model, options, input, functionType, generateResponse, }) {
-    return new ModelFunctionPromise(doExecuteCall({
-        model,
-        options,
-        input,
-        functionType,
-        generateResponse,
-    }));
-}
-async function doExecuteCall({ model, options, input, functionType, generateResponse, }) {
+export async function executeCall({ model, options, input, functionType, generateResponse, }) {
     const run = await getRun(options?.run);
     const settings = model.settings;
     const eventSource = new FunctionEventSource({
package/model-function/generate-image/ImageGenerationModel.d.ts
CHANGED
@@ -1,5 +1,6 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { Model, ModelSettings } from "../Model.js";
+import { PromptFormat } from "../PromptFormat.js";
 export interface ImageGenerationModelSettings extends ModelSettings {
 }
 export interface ImageGenerationModel<PROMPT, SETTINGS extends ImageGenerationModelSettings = ImageGenerationModelSettings> extends Model<SETTINGS> {
@@ -7,4 +8,5 @@ export interface ImageGenerationModel<PROMPT, SETTINGS extends ImageGenerationMo
         response: unknown;
         base64Image: string;
     }>;
+    withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, PROMPT>): ImageGenerationModel<INPUT_PROMPT, SETTINGS>;
 }
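The new `withPromptFormat` on `ImageGenerationModel` mirrors the text-generation API: it wraps the model so it accepts a different input prompt type. A sketch with an assumed Stability prompt shape (`[{ text }]`) and placeholder settings, not taken from the package:

```ts
import { generateImage, StabilityImageGenerationModel } from "modelfusion";

// Wrap the model so it accepts a plain string prompt.
const model = new StabilityImageGenerationModel({
  // ...settings
}).withPromptFormat({
  format: (prompt: string) => [{ text: prompt }],
});

const image = await generateImage(
  model,
  "the wicked witch of the west in the style of early 19th century painting"
);
```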