modelfusion 0.20.0 → 0.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -8
- package/composed-function/summarize/SummarizationFunction.d.ts +3 -3
- package/model-function/Model.d.ts +5 -2
- package/model-function/ModelCallEvent.d.ts +3 -3
- package/model-function/ModelFunctionOptions.d.ts +4 -0
- package/model-function/SuccessfulModelCall.cjs +3 -15
- package/model-function/SuccessfulModelCall.d.ts +2 -2
- package/model-function/SuccessfulModelCall.js +3 -15
- package/model-function/embed-text/TextEmbeddingEvent.d.ts +6 -8
- package/model-function/embed-text/TextEmbeddingModel.d.ts +2 -2
- package/model-function/embed-text/embedText.cjs +16 -22
- package/model-function/embed-text/embedText.d.ts +3 -3
- package/model-function/embed-text/embedText.js +16 -22
- package/model-function/executeCall.cjs +30 -8
- package/model-function/executeCall.d.ts +16 -9
- package/model-function/executeCall.js +30 -8
- package/model-function/generate-image/ImageGenerationEvent.d.ts +6 -8
- package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
- package/model-function/generate-image/generateImage.cjs +8 -11
- package/model-function/generate-image/generateImage.d.ts +2 -2
- package/model-function/generate-image/generateImage.js +8 -11
- package/model-function/generate-json/GenerateJsonModel.d.ts +2 -2
- package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +2 -2
- package/model-function/generate-json/JsonGenerationEvent.d.ts +6 -8
- package/model-function/generate-json/JsonTextGenerationModel.d.ts +2 -2
- package/model-function/generate-json/generateJson.cjs +8 -11
- package/model-function/generate-json/generateJson.d.ts +2 -2
- package/model-function/generate-json/generateJson.js +8 -11
- package/model-function/generate-json/generateJsonOrText.cjs +8 -11
- package/model-function/generate-json/generateJsonOrText.d.ts +2 -2
- package/model-function/generate-json/generateJsonOrText.js +8 -11
- package/model-function/generate-text/TextGenerationEvent.d.ts +6 -8
- package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
- package/model-function/generate-text/TextStreamingEvent.d.ts +6 -8
- package/model-function/generate-text/generateText.cjs +8 -11
- package/model-function/generate-text/generateText.d.ts +2 -2
- package/model-function/generate-text/generateText.js +8 -11
- package/model-function/generate-text/streamText.cjs +38 -23
- package/model-function/generate-text/streamText.d.ts +5 -5
- package/model-function/generate-text/streamText.js +38 -23
- package/model-function/index.cjs +1 -1
- package/model-function/index.d.ts +1 -1
- package/model-function/index.js +1 -1
- package/model-function/synthesize-speech/SpeechSynthesisEvent.d.ts +6 -8
- package/model-function/synthesize-speech/SpeechSynthesisModel.d.ts +2 -2
- package/model-function/synthesize-speech/synthesizeSpeech.cjs +8 -10
- package/model-function/synthesize-speech/synthesizeSpeech.d.ts +2 -2
- package/model-function/synthesize-speech/synthesizeSpeech.js +8 -10
- package/model-function/transcribe-speech/TranscriptionEvent.d.ts +6 -8
- package/model-function/transcribe-speech/TranscriptionModel.d.ts +2 -2
- package/model-function/transcribe-speech/transcribe.cjs +8 -11
- package/model-function/transcribe-speech/transcribe.d.ts +2 -2
- package/model-function/transcribe-speech/transcribe.js +8 -11
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +4 -4
- package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts +2 -2
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +3 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -3
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +4 -4
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +3 -3
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +4 -4
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +3 -3
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +5 -5
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +3 -3
- package/package.json +1 -1
- package/prompt/PromptFormatTextGenerationModel.d.ts +3 -3
- package/run/ConsoleLogger.cjs +1 -4
- package/run/ConsoleLogger.d.ts +4 -5
- package/run/ConsoleLogger.js +1 -4
- package/run/DefaultRun.cjs +1 -4
- package/run/DefaultRun.d.ts +5 -5
- package/run/DefaultRun.js +1 -4
- package/run/FunctionEvent.d.ts +51 -0
- package/run/{RunFunctionEventSource.js → FunctionEventSource.cjs} +7 -13
- package/run/FunctionEventSource.d.ts +12 -0
- package/run/{RunFunctionEventSource.cjs → FunctionEventSource.js} +3 -17
- package/run/FunctionObserver.d.ts +7 -0
- package/run/FunctionOptions.d.ts +19 -0
- package/run/GlobalFunctionObservers.cjs +12 -0
- package/run/GlobalFunctionObservers.d.ts +3 -0
- package/run/GlobalFunctionObservers.js +7 -0
- package/run/Run.d.ts +2 -2
- package/run/index.cjs +5 -5
- package/run/index.d.ts +5 -5
- package/run/index.js +5 -5
- package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.cjs +1 -0
- package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.d.ts +2 -2
- package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.js +1 -0
- package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +2 -2
- package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +2 -2
- package/text-chunk/split/SplitFunction.d.ts +3 -3
- package/text-chunk/upsertTextChunks.d.ts +2 -2
- package/tool/ExecuteToolEvent.d.ts +6 -8
- package/tool/Tool.d.ts +3 -3
- package/tool/WebSearchTool.d.ts +2 -1
- package/tool/executeTool.cjs +28 -26
- package/tool/executeTool.d.ts +5 -4
- package/tool/executeTool.js +28 -26
- package/tool/index.cjs +1 -0
- package/tool/index.d.ts +1 -0
- package/tool/index.js +1 -0
- package/tool/useTool.cjs +2 -6
- package/tool/useTool.d.ts +2 -2
- package/tool/useTool.js +2 -6
- package/tool/useToolOrGenerateText.cjs +1 -3
- package/tool/useToolOrGenerateText.d.ts +2 -2
- package/tool/useToolOrGenerateText.js +1 -3
- package/util/DurationMeasurement.cjs +6 -0
- package/util/DurationMeasurement.d.ts +1 -0
- package/util/DurationMeasurement.js +6 -0
- package/util/api/postToApi.cjs +8 -0
- package/util/api/postToApi.js +8 -0
- package/model-function/FunctionOptions.d.ts +0 -6
- package/run/IdMetadata.d.ts +0 -7
- package/run/RunFunction.d.ts +0 -9
- package/run/RunFunctionEvent.d.ts +0 -12
- package/run/RunFunctionEventSource.d.ts +0 -13
- package/run/RunFunctionObserver.cjs +0 -2
- package/run/RunFunctionObserver.d.ts +0 -5
- package/run/RunFunctionObserver.js +0 -1
- /package/model-function/{FunctionOptions.cjs → ModelFunctionOptions.cjs} +0 -0
- /package/model-function/{FunctionOptions.js → ModelFunctionOptions.js} +0 -0
- /package/run/{IdMetadata.cjs → FunctionEvent.cjs} +0 -0
- /package/run/{IdMetadata.js → FunctionEvent.js} +0 -0
- /package/run/{RunFunction.cjs → FunctionObserver.cjs} +0 -0
- /package/run/{RunFunction.js → FunctionObserver.js} +0 -0
- /package/run/{RunFunctionEvent.cjs → FunctionOptions.cjs} +0 -0
- /package/run/{RunFunctionEvent.js → FunctionOptions.js} +0 -0
package/model-provider/cohere/CohereTextGenerationModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { z } from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { DeltaEvent } from "../../model-function/generate-text/DeltaEvent.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
 import { PromptFormat } from "../../prompt/PromptFormat.js";
@@ -72,8 +72,8 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
 countPromptTokens(input: string): Promise<number>;
 callAPI<RESPONSE>(prompt: string, options: {
 responseFormat: CohereTextGenerationResponseFormatType<RESPONSE>;
-} &
-generateTextResponse(prompt: string, options?:
+} & ModelFunctionOptions<CohereTextGenerationModelSettings>): Promise<RESPONSE>;
+generateTextResponse(prompt: string, options?: ModelFunctionOptions<CohereTextGenerationModelSettings>): Promise<{
 prompt: string;
 id: string;
 generations: {
@@ -88,7 +88,7 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
 } | undefined;
 }>;
 extractText(response: CohereTextGenerationResponse): string;
-generateDeltaStreamResponse(prompt: string, options?:
+generateDeltaStreamResponse(prompt: string, options?: ModelFunctionOptions<CohereTextGenerationModelSettings>): Promise<AsyncIterable<DeltaEvent<CohereTextGenerationDelta>>>;
 extractTextDelta(fullDelta: CohereTextGenerationDelta): string | undefined;
 withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, CohereTextGenerationResponse, CohereTextGenerationDelta, CohereTextGenerationModelSettings, this>;
 withSettings(additionalSettings: Partial<CohereTextGenerationModelSettings>): this;

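The same mechanical change repeats across every provider hunk in this release: the options parameter of `callAPI`, `generateTextResponse`, `generateDeltaStreamResponse`, and related methods is now typed as `ModelFunctionOptions<SETTINGS>` instead of the old `FunctionOptions` import. Below is a rough, self-contained sketch of what that generic appears to carry, inferred from the signatures above and from the new `run/FunctionOptions.d.ts` shown further down; the `settings` field and the `*Sketch` helper types are assumptions for illustration only.

```ts
// Self-contained type sketch; names ending in "Sketch" are illustrative placeholders.
type RunSketch = { runId: string };
type FunctionEventSketch = { eventType: "started" | "finished"; functionType: string };
type FunctionObserverSketch = { onFunctionEvent(event: FunctionEventSketch): void };

// Inferred shape of ModelFunctionOptions<SETTINGS>: functionId/observers/run come from
// run/FunctionOptions.d.ts below; the settings field is an assumption, since
// ModelFunctionOptions.d.ts itself is not included in this excerpt.
export type ModelFunctionOptionsSketch<SETTINGS> = {
  functionId?: string;
  observers?: FunctionObserverSketch[];
  run?: RunSketch;
  settings?: Partial<SETTINGS>; // assumed per-call settings override
};
```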
package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 /// <reference types="node" resolution-mode="require"/>
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { SpeechSynthesisModel, SpeechSynthesisModelSettings } from "../../model-function/synthesize-speech/SpeechSynthesisModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -24,6 +24,6 @@ export declare class ElevenLabsSpeechSynthesisModel extends AbstractModel<Eleven
 readonly modelName: null;
 private get apiKey();
 private callAPI;
-generateSpeechResponse(text: string, options?:
+generateSpeechResponse(text: string, options?: ModelFunctionOptions<ElevenLabsSpeechSynthesisModelSettings> | undefined): Promise<Buffer>;
 withSettings(additionalSettings: Partial<ElevenLabsSpeechSynthesisModelSettings>): this;
 }

package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../../model-function/embed-text/TextEmbeddingModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -46,9 +46,9 @@ export declare class HuggingFaceTextEmbeddingModel extends AbstractModel<Hugging
 readonly embeddingDimensions: number | undefined;
 readonly tokenizer: undefined;
 private get apiKey();
-callAPI(texts: Array<string>, options?:
+callAPI(texts: Array<string>, options?: ModelFunctionOptions<HuggingFaceTextEmbeddingModelSettings>): Promise<HuggingFaceTextEmbeddingResponse>;
 readonly countPromptTokens: undefined;
-generateEmbeddingResponse(texts: string[], options?:
+generateEmbeddingResponse(texts: string[], options?: ModelFunctionOptions<HuggingFaceTextEmbeddingModelSettings>): Promise<number[][]>;
 extractEmbeddings(response: HuggingFaceTextEmbeddingResponse): number[][];
 withSettings(additionalSettings: Partial<HuggingFaceTextEmbeddingModelSettings>): this;
 }

package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -49,9 +49,9 @@ export declare class HuggingFaceTextGenerationModel extends AbstractModel<Huggin
 readonly contextWindowSize: undefined;
 readonly tokenizer: undefined;
 private get apiKey();
-callAPI(prompt: string, options?:
+callAPI(prompt: string, options?: ModelFunctionOptions<HuggingFaceTextGenerationModelSettings>): Promise<HuggingFaceTextGenerationResponse>;
 readonly countPromptTokens: undefined;
-generateTextResponse(prompt: string, options?:
+generateTextResponse(prompt: string, options?: ModelFunctionOptions<HuggingFaceTextGenerationModelSettings>): Promise<{
 generated_text: string;
 }[]>;
 extractText(response: HuggingFaceTextGenerationResponse): string;

package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../../model-function/embed-text/TextEmbeddingModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -23,8 +23,8 @@ export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTe
 readonly embeddingDimensions: number | undefined;
 private readonly tokenizer;
 tokenize(text: string): Promise<number[]>;
-callAPI(texts: Array<string>, options?:
-generateEmbeddingResponse(texts: string[], options?:
+callAPI(texts: Array<string>, options?: ModelFunctionOptions<LlamaCppTextEmbeddingModelSettings>): Promise<LlamaCppTextEmbeddingResponse>;
+generateEmbeddingResponse(texts: string[], options?: ModelFunctionOptions<LlamaCppTextEmbeddingModelSettings>): Promise<{
 embedding: number[];
 }>;
 extractEmbeddings(response: LlamaCppTextEmbeddingResponse): number[][];

package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { DeltaEvent } from "../../model-function/generate-text/DeltaEvent.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
 import { PromptFormat } from "../../prompt/PromptFormat.js";
@@ -46,9 +46,9 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
 readonly tokenizer: LlamaCppTokenizer;
 callAPI<RESPONSE>(prompt: string, options: {
 responseFormat: LlamaCppTextGenerationResponseFormatType<RESPONSE>;
-} &
+} & ModelFunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<RESPONSE>;
 countPromptTokens(prompt: string): Promise<number>;
-generateTextResponse(prompt: string, options?:
+generateTextResponse(prompt: string, options?: ModelFunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<{
 model: string;
 prompt: string;
 content: string;
@@ -98,7 +98,7 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
 truncated: boolean;
 }>;
 extractText(response: LlamaCppTextGenerationResponse): string;
-generateDeltaStreamResponse(prompt: string, options?:
+generateDeltaStreamResponse(prompt: string, options?: ModelFunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<AsyncIterable<DeltaEvent<LlamaCppTextGenerationDelta>>>;
 extractTextDelta(fullDelta: LlamaCppTextGenerationDelta): string | undefined;
 withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, LlamaCppTextGenerationResponse, LlamaCppTextGenerationDelta, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
 withSettings(additionalSettings: Partial<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;

package/model-provider/openai/OpenAIImageGenerationModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { z } from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
 import { ResponseHandler } from "../../util/api/postToApi.js";
 import { OpenAIModelSettings } from "./OpenAIModelSettings.js";
@@ -32,10 +32,10 @@ export declare class OpenAIImageGenerationModel extends AbstractModel<OpenAIImag
 private get apiKey();
 callAPI<RESULT>(prompt: string, options: {
 responseFormat: OpenAIImageGenerationResponseFormatType<RESULT>;
-} &
+} & ModelFunctionOptions<Partial<OpenAIImageGenerationCallSettings & OpenAIModelSettings & {
 user?: string;
 }>>): Promise<RESULT>;
-generateImageResponse(prompt: string, options?:
+generateImageResponse(prompt: string, options?: ModelFunctionOptions<OpenAIImageGenerationSettings>): Promise<{
 data: {
 b64_json: string;
 }[];

package/model-provider/openai/OpenAITextEmbeddingModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../../model-function/embed-text/TextEmbeddingModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -50,8 +50,8 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
 readonly contextWindowSize: number;
 private get apiKey();
 countTokens(input: string): Promise<number>;
-callAPI(text: string, options?:
-generateEmbeddingResponse(texts: string[], options?:
+callAPI(text: string, options?: ModelFunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<OpenAITextEmbeddingResponse>;
+generateEmbeddingResponse(texts: string[], options?: ModelFunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<{
 object: "list";
 model: string;
 data: {

package/model-provider/openai/OpenAITextGenerationModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { DeltaEvent } from "../../model-function/generate-text/DeltaEvent.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
 import { PromptFormat } from "../../prompt/PromptFormat.js";
@@ -117,10 +117,10 @@ export declare class OpenAITextGenerationModel extends AbstractModel<OpenAITextG
 countPromptTokens(input: string): Promise<number>;
 callAPI<RESULT>(prompt: string, options: {
 responseFormat: OpenAITextResponseFormatType<RESULT>;
-} &
+} & ModelFunctionOptions<Partial<OpenAIImageGenerationCallSettings & OpenAIModelSettings & {
 user?: string;
 }>>): Promise<RESULT>;
-generateTextResponse(prompt: string, options?:
+generateTextResponse(prompt: string, options?: ModelFunctionOptions<OpenAITextGenerationModelSettings>): Promise<{
 object: "text_completion";
 model: string;
 id: string;
@@ -138,7 +138,7 @@ export declare class OpenAITextGenerationModel extends AbstractModel<OpenAITextG
 }[];
 }>;
 extractText(response: OpenAITextGenerationResponse): string;
-generateDeltaStreamResponse(prompt: string, options?:
+generateDeltaStreamResponse(prompt: string, options?: ModelFunctionOptions<OpenAITextGenerationModelSettings>): Promise<AsyncIterable<DeltaEvent<OpenAITextGenerationDelta>>>;
 extractTextDelta(fullDelta: OpenAITextGenerationDelta): string | undefined;
 withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, OpenAITextGenerationResponse, OpenAITextGenerationDelta, OpenAITextGenerationModelSettings, this>;
 withSettings(additionalSettings: Partial<OpenAITextGenerationModelSettings>): this;

package/model-provider/openai/OpenAITranscriptionModel.d.ts
CHANGED
@@ -1,7 +1,7 @@
 /// <reference types="node" resolution-mode="require"/>
 import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { TranscriptionModel, TranscriptionModelSettings } from "../../model-function/transcribe-speech/TranscriptionModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -51,12 +51,12 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
 constructor(settings: OpenAITranscriptionModelSettings);
 readonly provider: "openai";
 get modelName(): "whisper-1";
-generateTranscriptionResponse(data: OpenAITranscriptionInput, options?:
+generateTranscriptionResponse(data: OpenAITranscriptionInput, options?: ModelFunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): PromiseLike<OpenAITranscriptionVerboseJsonResponse>;
 extractTranscriptionText(response: OpenAITranscriptionVerboseJsonResponse): string;
 private get apiKey();
 callAPI<RESULT>(data: OpenAITranscriptionInput, options: {
 responseFormat: OpenAITranscriptionResponseFormatType<RESULT>;
-} &
+} & ModelFunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): Promise<RESULT>;
 withSettings(additionalSettings: OpenAITranscriptionModelSettings): this;
 }
 declare const openAITranscriptionJsonSchema: z.ZodObject<{

package/model-provider/openai/chat/OpenAIChatModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import z from "zod";
 import { AbstractModel } from "../../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../../model-function/ModelFunctionOptions.js";
 import { GenerateJsonModel } from "../../../model-function/generate-json/GenerateJsonModel.js";
 import { GenerateJsonOrTextModel } from "../../../model-function/generate-json/GenerateJsonOrTextModel.js";
 import { DeltaEvent } from "../../../model-function/generate-text/DeltaEvent.js";
@@ -131,10 +131,10 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
 countPromptTokens(messages: OpenAIChatMessage[]): Promise<number>;
 callAPI<RESULT>(messages: Array<OpenAIChatMessage>, options: {
 responseFormat: OpenAIChatResponseFormatType<RESULT>;
-} &
+} & ModelFunctionOptions<Partial<OpenAIChatCallSettings & OpenAIModelSettings & {
 user?: string;
 }>>): Promise<RESULT>;
-generateTextResponse(prompt: OpenAIChatMessage[], options?:
+generateTextResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<{
 object: "chat.completion";
 model: string;
 id: string;
@@ -159,7 +159,7 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
 }[];
 }>;
 extractText(response: OpenAIChatResponse): string;
-generateDeltaStreamResponse(prompt: OpenAIChatMessage[], options?:
+generateDeltaStreamResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
 extractTextDelta(fullDelta: OpenAIChatDelta): string | undefined;
 /**
 * JSON generation uses the OpenAI GPT function calling API.
@@ -168,7 +168,7 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
 *
 * @see https://platform.openai.com/docs/guides/gpt/function-calling
 */
-generateJsonResponse(prompt: OpenAIChatSingleFunctionPrompt<unknown> | OpenAIChatAutoFunctionPrompt<Array<OpenAIFunctionDescription<unknown>>>, options?:
+generateJsonResponse(prompt: OpenAIChatSingleFunctionPrompt<unknown> | OpenAIChatAutoFunctionPrompt<Array<OpenAIFunctionDescription<unknown>>>, options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
 extractJson(response: OpenAIChatResponse): unknown;
 withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextGenerationModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings, this>;
 withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;

package/model-provider/stability/StabilityImageGenerationModel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { z } from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import {
+import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
 import { RetryFunction } from "../../util/api/RetryFunction.js";
 import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -31,8 +31,8 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
 readonly provider: "stability";
 get modelName(): string;
 private get apiKey();
-callAPI(input: StabilityImageGenerationPrompt, options?:
-generateImageResponse(prompt: StabilityImageGenerationPrompt, options?:
+callAPI(input: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
+generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
 artifacts: {
 seed: number;
 base64: string;

package/package.json
CHANGED
package/prompt/PromptFormatTextGenerationModel.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
 import { DeltaEvent } from "../model-function/generate-text/DeltaEvent.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../model-function/generate-text/TextGenerationModel.js";
 import { PromptFormat } from "./PromptFormat.js";
@@ -14,9 +14,9 @@ export declare class PromptFormatTextGenerationModel<PROMPT, MODEL_PROMPT, RESPO
 get tokenizer(): MODEL["tokenizer"];
 get contextWindowSize(): MODEL["contextWindowSize"];
 get countPromptTokens(): MODEL["countPromptTokens"] extends undefined ? undefined : (prompt: PROMPT) => PromiseLike<number>;
-generateTextResponse(prompt: PROMPT, options?:
+generateTextResponse(prompt: PROMPT, options?: ModelFunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
 extractText(response: RESPONSE): string;
-get generateDeltaStreamResponse(): MODEL["generateDeltaStreamResponse"] extends undefined ? undefined : (prompt: PROMPT, options:
+get generateDeltaStreamResponse(): MODEL["generateDeltaStreamResponse"] extends undefined ? undefined : (prompt: PROMPT, options: ModelFunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
 get extractTextDelta(): MODEL["extractTextDelta"];
 withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, PROMPT>): PromptFormatTextGenerationModel<INPUT_PROMPT, PROMPT, RESPONSE, FULL_DELTA, SETTINGS, this>;
 withSettings(additionalSettings: Partial<SETTINGS>): this;

package/run/ConsoleLogger.cjs
CHANGED
@@ -2,10 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ConsoleLogger = void 0;
 class ConsoleLogger {
-
-console.log(JSON.stringify(event, null, 2));
-}
-onRunFunctionFinished(event) {
+onFunctionEvent(event) {
 console.log(JSON.stringify(event, null, 2));
 }
 }

package/run/ConsoleLogger.d.ts
CHANGED
@@ -1,6 +1,5 @@
-import {
-import {
-export declare class ConsoleLogger implements
-
-onRunFunctionFinished(event: RunFunctionFinishedEvent): void;
+import { FunctionEvent } from "./FunctionEvent.js";
+import { FunctionObserver } from "./FunctionObserver.js";
+export declare class ConsoleLogger implements FunctionObserver {
+onFunctionEvent(event: FunctionEvent): void;
 }

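ConsoleLogger now implements the single-method `FunctionObserver` interface instead of the old per-event `onRunFunction*` callbacks. A minimal custom observer in the same style might look like the sketch below; it assumes that `FunctionObserver` and `FunctionEvent` are re-exported from the package root (the changed `run/index.*` files suggest this but their contents are not shown) and that the members of the `FunctionEvent` union carry the `eventType` discriminator from the base types in `run/FunctionEvent.d.ts`.

```ts
import { FunctionEvent, FunctionObserver } from "modelfusion";

// Buffers finished events so they can be inspected in tests or forwarded elsewhere.
class InMemoryLogger implements FunctionObserver {
  readonly finishedEvents: FunctionEvent[] = [];

  onFunctionEvent(event: FunctionEvent): void {
    if (event.eventType === "finished") {
      this.finishedEvents.push(event);
    }
  }
}
```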
package/run/ConsoleLogger.js
CHANGED
package/run/DefaultRun.cjs
CHANGED
@@ -55,10 +55,7 @@ class DefaultRun {
 this.costCalculators = costCalculators;
 this.observers = [
 {
-
-this.events.push(event);
-},
-onRunFunctionFinished: (event) => {
+onFunctionEvent: (event) => {
 this.events.push(event);
 },
 },

package/run/DefaultRun.d.ts
CHANGED
@@ -1,22 +1,22 @@
 import { CostCalculator } from "../cost/CostCalculator.js";
 import { SuccessfulModelCall } from "../model-function/SuccessfulModelCall.js";
 import { Run } from "./Run.js";
-import {
-import {
+import { FunctionEvent } from "./FunctionEvent.js";
+import { FunctionObserver } from "./FunctionObserver.js";
 export declare class DefaultRun implements Run {
 readonly runId: string;
 readonly sessionId?: string;
 readonly userId?: string;
 readonly abortSignal?: AbortSignal;
 readonly costCalculators: CostCalculator[];
-readonly events:
-readonly observers?:
+readonly events: FunctionEvent[];
+readonly observers?: FunctionObserver[];
 constructor({ runId, sessionId, userId, abortSignal, observers, costCalculators, }?: {
 runId?: string;
 sessionId?: string;
 userId?: string;
 abortSignal?: AbortSignal;
-observers?:
+observers?: FunctionObserver[];
 costCalculators?: CostCalculator[];
 });
 get successfulModelCalls(): Array<SuccessfulModelCall>;

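Based on the constructor signature above, a run can be wired up with observers like this (a sketch; the root-level `ConsoleLogger` and `DefaultRun` exports are assumed, as in earlier versions):

```ts
import { ConsoleLogger, DefaultRun } from "modelfusion";

const run = new DefaultRun({
  observers: [new ConsoleLogger()], // each observer receives events via onFunctionEvent
});

// DefaultRun also registers its own observer (see the onFunctionEvent arrow function
// in DefaultRun.cjs/.js in this diff), so every event ends up in run.events as well.
```

Passing `{ run }` in a function's options then associates that call with the run, per the `run` field declared in run/FunctionOptions.d.ts further below.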
package/run/DefaultRun.js
CHANGED
@@ -52,10 +52,7 @@ export class DefaultRun {
 this.costCalculators = costCalculators;
 this.observers = [
 {
-
-this.events.push(event);
-},
-onRunFunctionFinished: (event) => {
+onFunctionEvent: (event) => {
 this.events.push(event);
 },
 },

package/run/FunctionEvent.d.ts
@@ -0,0 +1,51 @@
+import { ExecuteToolFinishedEvent, ExecuteToolStartedEvent } from "../tool/ExecuteToolEvent.js";
+import { ModelCallFinishedEvent, ModelCallStartedEvent } from "../model-function/ModelCallEvent.js";
+export type BaseFunctionEvent = {
+/**
+* Unique identifier for the function call.
+*/
+callId: string | undefined;
+/**
+* Optional unique identifier for the function.
+*/
+functionId?: string | undefined;
+/**
+* Unique identifier for the run.
+* Only available if the function is part of a run.
+*/
+runId?: string | undefined;
+/**
+* Unique identifier for the session.
+* Only available if the function is part of a run with a session.
+*/
+sessionId?: string | undefined;
+/**
+* Unique identifier for the user.
+* Only available if the function is part of a run with a user.
+*/
+userId?: string | undefined;
+/**
+* Timestamp of the event.
+*/
+timestamp: Date;
+/**
+* Type of the event. Defined in the subclasses.
+*/
+eventType: "started" | "finished";
+/**
+* Type of the function. Defined in the subclasses.
+*/
+functionType: string;
+};
+export type BaseFunctionStartedEvent = BaseFunctionEvent & {
+eventType: "started";
+startTimestamp: Date;
+};
+export type BaseFunctionFinishedEvent = BaseFunctionEvent & {
+eventType: "finished";
+status: "success" | "error" | "abort";
+startTimestamp: Date;
+finishTimestamp: Date;
+durationInMs: number;
+};
+export type FunctionEvent = ModelCallStartedEvent | ExecuteToolStartedEvent | ModelCallFinishedEvent | ExecuteToolFinishedEvent;

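The `eventType` and `status` discriminators defined here make filtering inside an observer straightforward. A sketch, assuming the concrete `ModelCall*`/`ExecuteTool*` members of the union extend the base started/finished shapes above:

```ts
import { FunctionEvent, FunctionObserver } from "modelfusion";

class SlowCallLogger implements FunctionObserver {
  onFunctionEvent(event: FunctionEvent): void {
    // BaseFunctionFinishedEvent contributes status, durationInMs, and the timestamps.
    if (event.eventType === "finished" && event.durationInMs > 5000) {
      console.warn(
        `${event.functionType} (callId: ${event.callId ?? "unknown"}) ` +
          `finished with status ${event.status} after ${event.durationInMs} ms`
      );
    }
  }
}
```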
package/run/{RunFunctionEventSource.js → FunctionEventSource.cjs}
@@ -1,4 +1,7 @@
-
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FunctionEventSource = void 0;
+class FunctionEventSource {
 constructor({ observers, errorHandler, }) {
 Object.defineProperty(this, "observers", {
 enumerable: true,
@@ -15,20 +18,10 @@ export class RunFunctionEventSource {
 this.observers = observers;
 this.errorHandler = errorHandler ?? ((error) => console.error(error));
 }
-
+notify(event) {
 for (const observer of this.observers) {
 try {
-observer.
-}
-catch (error) {
-this.errorHandler(error);
-}
-}
-}
-notifyRunFunctionFinished(event) {
-for (const observer of this.observers) {
-try {
-observer.onRunFunctionFinished?.(event);
+observer.onFunctionEvent(event);
 }
 catch (error) {
 this.errorHandler(error);
@@ -36,3 +29,4 @@ export class RunFunctionEventSource {
 }
 }
 }
+exports.FunctionEventSource = FunctionEventSource;

package/run/FunctionEventSource.d.ts
@@ -0,0 +1,12 @@
+import { ErrorHandler } from "../util/ErrorHandler.js";
+import { FunctionEvent } from "./FunctionEvent.js";
+import { FunctionObserver } from "./FunctionObserver.js";
+export declare class FunctionEventSource {
+readonly observers: FunctionObserver[];
+readonly errorHandler: ErrorHandler;
+constructor({ observers, errorHandler, }: {
+observers: FunctionObserver[];
+errorHandler?: ErrorHandler;
+});
+notify(event: FunctionEvent): void;
+}

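FunctionEventSource replaces RunFunctionEventSource and collapses the separate notify* methods into a single `notify`. A usage sketch based on the declaration above (the root export of `FunctionEventSource` is assumed; `ErrorHandler` is treated as a plain error callback):

```ts
import { ConsoleLogger, FunctionEventSource } from "modelfusion";

const eventSource = new FunctionEventSource({
  observers: [new ConsoleLogger()],
  errorHandler: (error: unknown) => console.error("observer failed:", error),
});

// eventSource.notify(event) forwards the event to every observer; an observer that
// throws is routed to errorHandler instead of aborting the loop (see the try/catch
// in FunctionEventSource.js in this diff).
```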
package/run/{RunFunctionEventSource.cjs → FunctionEventSource.js}
@@ -1,7 +1,4 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RunFunctionEventSource = void 0;
-class RunFunctionEventSource {
+export class FunctionEventSource {
 constructor({ observers, errorHandler, }) {
 Object.defineProperty(this, "observers", {
 enumerable: true,
@@ -18,20 +15,10 @@ class RunFunctionEventSource {
 this.observers = observers;
 this.errorHandler = errorHandler ?? ((error) => console.error(error));
 }
-
+notify(event) {
 for (const observer of this.observers) {
 try {
-observer.
-}
-catch (error) {
-this.errorHandler(error);
-}
-}
-}
-notifyRunFunctionFinished(event) {
-for (const observer of this.observers) {
-try {
-observer.onRunFunctionFinished?.(event);
+observer.onFunctionEvent(event);
 }
 catch (error) {
 this.errorHandler(error);
@@ -39,4 +26,3 @@ class RunFunctionEventSource {
 }
 }
 }
-exports.RunFunctionEventSource = RunFunctionEventSource;

package/run/FunctionOptions.d.ts
@@ -0,0 +1,19 @@
+import { Run } from "./Run.js";
+import { FunctionObserver } from "./FunctionObserver.js";
+/**
+* Additional settings for ModelFusion functions.
+*/
+export type FunctionOptions = {
+/**
+* Optional function identifier that is used in events to identify the function.
+*/
+functionId?: string;
+/**
+* Optional observers that are called when the function is invoked.
+*/
+observers?: Array<FunctionObserver>;
+/**
+* Optional run as part of which this function is called.
+*/
+run?: Run;
+};

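These are the base options that ModelFusion functions accept alongside their model-specific settings (ModelFunctionOptions, per the file list at the top, builds on this type). A hedged call-site sketch; the `generateText` argument order, the OpenAI model name, and the constructor settings are illustrative assumptions, since those files are not part of this excerpt:

```ts
import {
  ConsoleLogger,
  DefaultRun,
  OpenAITextGenerationModel,
  generateText,
} from "modelfusion";

const run = new DefaultRun();

// Assumed call shape: generateText(model, prompt, options). Only functionId,
// observers, and run are confirmed by run/FunctionOptions.d.ts above.
const text = await generateText(
  new OpenAITextGenerationModel({ model: "text-davinci-003" }), // illustrative settings
  "Write a one-line changelog entry for renaming FunctionOptions.",
  {
    functionId: "changelog-entry",
    observers: [new ConsoleLogger()], // per-call observers
    run, // events are also recorded on the run
  }
);
```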
package/run/GlobalFunctionObservers.cjs
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getGlobalFunctionObservers = exports.setGlobalFunctionObservers = void 0;
+let globalFunctionObservers = [];
+function setGlobalFunctionObservers(functionObservers) {
+globalFunctionObservers = functionObservers;
+}
+exports.setGlobalFunctionObservers = setGlobalFunctionObservers;
+function getGlobalFunctionObservers() {
+return globalFunctionObservers;
+}
+exports.getGlobalFunctionObservers = getGlobalFunctionObservers;

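Finally, the new global observer registry provides a third place to hook in observers, next to per-run and per-call ones. A sketch of registering one at startup; the root re-export of `setGlobalFunctionObservers` is assumed (only the CommonJS module itself is shown above), and the claim that executeCall/executeTool consult `getGlobalFunctionObservers()` is an assumption based on the naming and the changed `executeCall.*`/`executeTool.*` files in the list.

```ts
import { ConsoleLogger, setGlobalFunctionObservers } from "modelfusion";

// Register once at application startup; subsequent model and tool calls are expected
// to notify these observers in addition to any run- or call-level observers.
setGlobalFunctionObservers([new ConsoleLogger()]);
```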