modelfusion 0.22.0 → 0.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/core/FunctionOptions.d.ts +14 -0
- package/core/GlobalFunctionLogging.cjs +12 -0
- package/core/GlobalFunctionLogging.d.ts +3 -0
- package/core/GlobalFunctionLogging.js +7 -0
- package/core/getFunctionCallLogger.cjs +74 -0
- package/core/getFunctionCallLogger.d.ts +3 -0
- package/core/getFunctionCallLogger.js +70 -0
- package/core/index.cjs +1 -1
- package/core/index.d.ts +1 -1
- package/core/index.js +1 -1
- package/model-function/AbstractModel.d.ts +1 -0
- package/model-function/Model.d.ts +6 -1
- package/model-function/ModelCallEvent.d.ts +21 -2
- package/model-function/embed-text/embedText.d.ts +2 -2
- package/model-function/executeCall.cjs +24 -17
- package/model-function/executeCall.d.ts +15 -13
- package/model-function/executeCall.js +22 -15
- package/model-function/generate-image/generateImage.d.ts +1 -1
- package/model-function/generate-json/JsonGenerationEvent.d.ts +16 -0
- package/model-function/generate-json/JsonGenerationModel.d.ts +13 -0
- package/model-function/generate-json/JsonOrTextGenerationModel.d.ts +23 -0
- package/model-function/generate-json/JsonTextGenerationModel.cjs +3 -0
- package/model-function/generate-json/JsonTextGenerationModel.d.ts +6 -5
- package/model-function/generate-json/JsonTextGenerationModel.js +3 -0
- package/model-function/generate-json/generateJson.cjs +1 -0
- package/model-function/generate-json/generateJson.d.ts +2 -2
- package/model-function/generate-json/generateJson.js +1 -0
- package/model-function/generate-json/generateJsonOrText.cjs +1 -0
- package/model-function/generate-json/generateJsonOrText.d.ts +2 -2
- package/model-function/generate-json/generateJsonOrText.js +1 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +5 -2
- package/model-function/generate-text/TextGenerationModel.d.ts +5 -0
- package/model-function/generate-text/generateText.cjs +1 -0
- package/model-function/generate-text/generateText.d.ts +1 -1
- package/model-function/generate-text/generateText.js +1 -0
- package/model-function/generate-text/streamText.cjs +9 -6
- package/model-function/generate-text/streamText.d.ts +5 -5
- package/model-function/generate-text/streamText.js +9 -6
- package/model-function/index.cjs +3 -2
- package/model-function/index.d.ts +3 -2
- package/model-function/index.js +3 -2
- package/model-function/synthesize-speech/SpeechSynthesisEvent.d.ts +2 -2
- package/model-function/synthesize-speech/SpeechSynthesisModel.d.ts +1 -1
- package/model-function/synthesize-speech/synthesizeSpeech.d.ts +1 -1
- package/model-function/transcribe-speech/TranscriptionModel.d.ts +1 -1
- package/model-function/transcribe-speech/transcribe.d.ts +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +9 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +9 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +6 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.js +6 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +20 -0
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +2 -1
- package/model-provider/cohere/CohereTextGenerationModel.js +20 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.cjs +8 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts +1 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.js +8 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +7 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +7 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +16 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +1 -0
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +16 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +6 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +1 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +6 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +31 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +6 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +31 -0
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +8 -0
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -0
- package/model-provider/openai/OpenAIImageGenerationModel.js +8 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +5 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -11
- package/model-provider/openai/OpenAITextEmbeddingModel.js +5 -0
- package/model-provider/openai/OpenAITextGenerationModel.cjs +24 -0
- package/model-provider/openai/OpenAITextGenerationModel.d.ts +14 -8
- package/model-provider/openai/OpenAITextGenerationModel.js +24 -0
- package/model-provider/openai/OpenAITranscriptionModel.cjs +7 -0
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +2 -0
- package/model-provider/openai/OpenAITranscriptionModel.js +7 -0
- package/model-provider/openai/chat/OpenAIChatModel.cjs +20 -0
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +17 -11
- package/model-provider/openai/chat/OpenAIChatModel.js +20 -0
- package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +2 -2
- package/model-provider/stability/StabilityImageGenerationModel.cjs +15 -0
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -0
- package/model-provider/stability/StabilityImageGenerationModel.js +15 -0
- package/package.json +1 -1
- package/prompt/PromptFormatTextGenerationModel.cjs +3 -0
- package/prompt/PromptFormatTextGenerationModel.d.ts +1 -0
- package/prompt/PromptFormatTextGenerationModel.js +3 -0
- package/tool/executeTool.cjs +3 -0
- package/tool/executeTool.js +3 -0
- package/tool/useTool.d.ts +2 -2
- package/tool/useToolOrGenerateText.d.ts +2 -2
- package/core/ConsoleLogger.cjs +0 -9
- package/core/ConsoleLogger.d.ts +0 -5
- package/core/ConsoleLogger.js +0 -5
- package/model-function/generate-json/GenerateJsonModel.d.ts +0 -8
- package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +0 -18
- /package/model-function/generate-json/{GenerateJsonModel.cjs → JsonGenerationModel.cjs} +0 -0
- /package/model-function/generate-json/{GenerateJsonModel.js → JsonGenerationModel.js} +0 -0
- /package/model-function/generate-json/{GenerateJsonOrTextModel.cjs → JsonOrTextGenerationModel.cjs} +0 -0
- /package/model-function/generate-json/{GenerateJsonOrTextModel.js → JsonOrTextGenerationModel.js} +0 -0
package/model-provider/openai/chat/OpenAIChatModel.cjs
CHANGED
```diff
@@ -170,6 +170,19 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
             call: async () => callOpenAIChatCompletionAPI(callSettings),
         });
     }
+    get settingsForEvent() {
+        const eventSettingProperties = [
+            "stopSequences",
+            "maxCompletionTokens",
+            "baseUrl",
+            "functions",
+            "functionCall",
+            "temperature",
+            "topP",
+            "n",
+        ];
+        return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
+    }
     generateTextResponse(prompt, options) {
         return this.callAPI(prompt, {
             ...options,
@@ -211,6 +224,13 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
         const jsonText = response.choices[0].message.function_call.arguments;
         return secure_json_parse_1.default.parse(jsonText);
     }
+    extractUsage(response) {
+        return {
+            promptTokens: response.usage.prompt_tokens,
+            completionTokens: response.usage.completion_tokens,
+            totalTokens: response.usage.total_tokens,
+        };
+    }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
             model: this.withSettings({ stopSequences: promptFormat.stopSequences }),
```
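The `settingsForEvent` getter added above (and mirrored for the other providers further down) reduces a model's full settings object to an allow-list of properties before they are attached to model-call events. A minimal standalone sketch of the filter; the generic signature and the example values are illustrative, not taken from the package:

```ts
// Generic form of the allow-list filter that each model's settingsForEvent applies.
function pickSettingsForEvent<SETTINGS extends Record<string, unknown>>(
  settings: SETTINGS,
  eventSettingProperties: string[]
): Partial<SETTINGS> {
  return Object.fromEntries(
    Object.entries(settings).filter(([key]) => eventSettingProperties.includes(key))
  ) as Partial<SETTINGS>;
}

// Only allow-listed properties survive, so values like API keys stay out of events.
const eventSettings = pickSettingsForEvent(
  { temperature: 0.7, topP: 1, apiKey: "secret" }, // hypothetical settings object
  ["temperature", "topP", "n"]
);
// => { temperature: 0.7, topP: 1 }
```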
package/model-provider/openai/chat/OpenAIChatModel.d.ts
CHANGED
```diff
@@ -1,8 +1,8 @@
 import z from "zod";
 import { AbstractModel } from "../../../model-function/AbstractModel.js";
 import { ModelFunctionOptions } from "../../../model-function/ModelFunctionOptions.js";
-import {
-import {
+import { JsonGenerationModel } from "../../../model-function/generate-json/JsonGenerationModel.js";
+import { JsonOrTextGenerationModel } from "../../../model-function/generate-json/JsonOrTextGenerationModel.js";
 import { DeltaEvent } from "../../../model-function/generate-text/DeltaEvent.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "../../../model-function/generate-text/TextGenerationModel.js";
 import { PromptFormat } from "../../../prompt/PromptFormat.js";
@@ -117,7 +117,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, OpenAIM
  * ),
  * ]);
  */
-export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>,
+export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, JsonGenerationModel<OpenAIChatSingleFunctionPrompt<unknown>, OpenAIChatResponse, OpenAIChatSettings>, JsonOrTextGenerationModel<OpenAIChatAutoFunctionPrompt<Array<OpenAIFunctionDescription<unknown>>>, OpenAIChatResponse, OpenAIChatSettings> {
     constructor(settings: OpenAIChatSettings);
     readonly provider: "openai";
     get modelName(): "gpt-4" | "gpt-4-0314" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0314" | "gpt-4-32k-0613" | "gpt-3.5-turbo" | "gpt-3.5-turbo-0301" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-16k-0613";
@@ -134,16 +134,17 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
     } & ModelFunctionOptions<Partial<OpenAIChatCallSettings & OpenAIModelSettings & {
         user?: string;
     }>>): Promise<RESULT>;
+    get settingsForEvent(): Partial<OpenAIChatSettings>;
     generateTextResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<{
         object: "chat.completion";
         model: string;
-        id: string;
-        created: number;
         usage: {
             prompt_tokens: number;
             total_tokens: number;
             completion_tokens: number;
         };
+        id: string;
+        created: number;
         choices: {
             message: {
                 content: string | null;
@@ -170,6 +171,11 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
      */
     generateJsonResponse(prompt: OpenAIChatSingleFunctionPrompt<unknown> | OpenAIChatAutoFunctionPrompt<Array<OpenAIFunctionDescription<unknown>>>, options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
     extractJson(response: OpenAIChatResponse): unknown;
+    extractUsage(response: OpenAIChatResponse): {
+        promptTokens: number;
+        completionTokens: number;
+        totalTokens: number;
+    };
     withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextGenerationModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings, this>;
     withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
 }
@@ -251,13 +257,13 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 }, "strip", z.ZodTypeAny, {
     object: "chat.completion";
     model: string;
-    id: string;
-    created: number;
     usage: {
         prompt_tokens: number;
         total_tokens: number;
         completion_tokens: number;
     };
+    id: string;
+    created: number;
     choices: {
         message: {
             content: string | null;
@@ -274,13 +280,13 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 }, {
     object: "chat.completion";
     model: string;
-    id: string;
-    created: number;
     usage: {
         prompt_tokens: number;
         total_tokens: number;
         completion_tokens: number;
     };
+    id: string;
+    created: number;
     choices: {
         message: {
             content: string | null;
@@ -309,13 +315,13 @@ export declare const OpenAIChatResponseFormat: {
     handler: ResponseHandler<{
         object: "chat.completion";
         model: string;
-        id: string;
-        created: number;
         usage: {
             prompt_tokens: number;
             total_tokens: number;
             completion_tokens: number;
         };
+        id: string;
+        created: number;
         choices: {
             message: {
                 content: string | null;
```
package/model-provider/openai/chat/OpenAIChatModel.js
CHANGED
```diff
@@ -162,6 +162,19 @@ export class OpenAIChatModel extends AbstractModel {
             call: async () => callOpenAIChatCompletionAPI(callSettings),
         });
     }
+    get settingsForEvent() {
+        const eventSettingProperties = [
+            "stopSequences",
+            "maxCompletionTokens",
+            "baseUrl",
+            "functions",
+            "functionCall",
+            "temperature",
+            "topP",
+            "n",
+        ];
+        return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
+    }
     generateTextResponse(prompt, options) {
         return this.callAPI(prompt, {
             ...options,
@@ -203,6 +216,13 @@ export class OpenAIChatModel extends AbstractModel {
         const jsonText = response.choices[0].message.function_call.arguments;
         return SecureJSON.parse(jsonText);
     }
+    extractUsage(response) {
+        return {
+            promptTokens: response.usage.prompt_tokens,
+            completionTokens: response.usage.completion_tokens,
+            totalTokens: response.usage.total_tokens,
+        };
+    }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
             model: this.withSettings({ stopSequences: promptFormat.stopSequences }),
```
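`extractUsage`, added in both the CommonJS and ESM builds above, maps the snake_case `usage` block of the OpenAI chat response onto the camelCase shape reported in model-call events. A typed sketch of exactly that mapping; the interface names are assumptions, while the field mapping is copied from the diff:

```ts
// Assumed interface names; the mapping itself mirrors extractUsage above.
interface OpenAIUsage {
  prompt_tokens: number;
  completion_tokens: number;
  total_tokens: number;
}

interface CallUsage {
  promptTokens: number;
  completionTokens: number;
  totalTokens: number;
}

function extractUsage(response: { usage: OpenAIUsage }): CallUsage {
  return {
    promptTokens: response.usage.prompt_tokens,
    completionTokens: response.usage.completion_tokens,
    totalTokens: response.usage.total_tokens,
  };
}
```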
package/model-provider/openai/chat/OpenAIChatPrompt.d.ts
CHANGED
```diff
@@ -1,5 +1,5 @@
 import z from "zod";
-import {
+import { JsonOrTextGenerationPrompt } from "../../../model-function/generate-json/JsonOrTextGenerationModel.js";
 import { SchemaDefinition } from "../../../model-function/generate-json/SchemaDefinition.js";
 import { Tool } from "../../../tool/Tool.js";
 import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
@@ -76,7 +76,7 @@ export declare class OpenAIChatSingleFunctionPrompt<FUNCTION> {
         };
     }[];
 }
-export declare class OpenAIChatAutoFunctionPrompt<FUNCTIONS extends Array<OpenAIFunctionDescription<any>>> implements
+export declare class OpenAIChatAutoFunctionPrompt<FUNCTIONS extends Array<OpenAIFunctionDescription<any>>> implements JsonOrTextGenerationPrompt<OpenAIChatResponse> {
     readonly messages: OpenAIChatMessage[];
     readonly fns: FUNCTIONS;
     constructor({ messages, fns, }: {
```
package/model-provider/stability/StabilityImageGenerationModel.cjs
CHANGED
```diff
@@ -64,6 +64,21 @@ class StabilityImageGenerationModel extends AbstractModel_js_1.AbstractModel {
             call: async () => callStabilityImageGenerationAPI(callSettings),
         });
     }
+    get settingsForEvent() {
+        const eventSettingProperties = [
+            "baseUrl",
+            "height",
+            "width",
+            "cfgScale",
+            "clipGuidancePreset",
+            "sampler",
+            "samples",
+            "seed",
+            "steps",
+            "stylePreset",
+        ];
+        return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
+    }
     generateImageResponse(prompt, options) {
         return this.callAPI(prompt, options);
     }
```
package/model-provider/stability/StabilityImageGenerationModel.d.ts
CHANGED
```diff
@@ -32,6 +32,7 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
     get modelName(): string;
     private get apiKey();
     callAPI(input: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
+    get settingsForEvent(): Partial<StabilityImageGenerationModelSettings>;
     generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
         artifacts: {
             seed: number;
```
package/model-provider/stability/StabilityImageGenerationModel.js
CHANGED
```diff
@@ -61,6 +61,21 @@ export class StabilityImageGenerationModel extends AbstractModel {
             call: async () => callStabilityImageGenerationAPI(callSettings),
         });
     }
+    get settingsForEvent() {
+        const eventSettingProperties = [
+            "baseUrl",
+            "height",
+            "width",
+            "cfgScale",
+            "clipGuidancePreset",
+            "sampler",
+            "samples",
+            "seed",
+            "steps",
+            "stylePreset",
+        ];
+        return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
+    }
     generateImageResponse(prompt, options) {
         return this.callAPI(prompt, options);
     }
```
package/package.json
CHANGED
package/prompt/PromptFormatTextGenerationModel.cjs
CHANGED
```diff
@@ -65,6 +65,9 @@ class PromptFormatTextGenerationModel {
             promptFormat,
         });
     }
+    get settingsForEvent() {
+        return this.model.settingsForEvent;
+    }
     withSettings(additionalSettings) {
         return new PromptFormatTextGenerationModel({
             model: this.model.withSettings(additionalSettings),
```
package/prompt/PromptFormatTextGenerationModel.d.ts
CHANGED
```diff
@@ -19,5 +19,6 @@ export declare class PromptFormatTextGenerationModel<PROMPT, MODEL_PROMPT, RESPO
     get generateDeltaStreamResponse(): MODEL["generateDeltaStreamResponse"] extends undefined ? undefined : (prompt: PROMPT, options: ModelFunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
     get extractTextDelta(): MODEL["extractTextDelta"];
     withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, PROMPT>): PromptFormatTextGenerationModel<INPUT_PROMPT, PROMPT, RESPONSE, FULL_DELTA, SETTINGS, this>;
+    get settingsForEvent(): Partial<SETTINGS>;
     withSettings(additionalSettings: Partial<SETTINGS>): this;
 }
```
package/prompt/PromptFormatTextGenerationModel.js
CHANGED
```diff
@@ -62,6 +62,9 @@ export class PromptFormatTextGenerationModel {
             promptFormat,
         });
     }
+    get settingsForEvent() {
+        return this.model.settingsForEvent;
+    }
     withSettings(additionalSettings) {
         return new PromptFormatTextGenerationModel({
             model: this.model.withSettings(additionalSettings),
```
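Because `PromptFormatTextGenerationModel` wraps another model, its new `settingsForEvent` is a pure pass-through, as the three hunks above show. A decorator-style sketch of that delegation; the interface is an illustrative reduction, not the package's actual model type:

```ts
// Illustrative reduction of the delegation in the hunks above.
interface ReportsEventSettings<SETTINGS> {
  readonly settingsForEvent: Partial<SETTINGS>;
}

class WrapperModel<SETTINGS> implements ReportsEventSettings<SETTINGS> {
  constructor(private readonly model: ReportsEventSettings<SETTINGS>) {}

  // Pass-through: the wrapper contributes no event settings of its own.
  get settingsForEvent(): Partial<SETTINGS> {
    return this.model.settingsForEvent;
  }
}
```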
package/tool/executeTool.cjs
CHANGED
```diff
@@ -3,7 +3,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.executeTool = exports.ExecuteToolPromise = void 0;
 const nanoid_1 = require("nanoid");
 const FunctionEventSource_js_1 = require("../core/FunctionEventSource.cjs");
+const GlobalFunctionLogging_js_1 = require("../core/GlobalFunctionLogging.cjs");
 const GlobalFunctionObservers_js_1 = require("../core/GlobalFunctionObservers.cjs");
+const getFunctionCallLogger_js_1 = require("../core/getFunctionCallLogger.cjs");
 const DurationMeasurement_js_1 = require("../util/DurationMeasurement.cjs");
 const AbortError_js_1 = require("../util/api/AbortError.cjs");
 const runSafe_js_1 = require("../util/runSafe.cjs");
@@ -55,6 +57,7 @@ async function doExecuteTool(tool, input, options) {
     const run = options?.run;
     const eventSource = new FunctionEventSource_js_1.FunctionEventSource({
         observers: [
+            ...(0, getFunctionCallLogger_js_1.getFunctionCallLogger)(options?.logging ?? (0, GlobalFunctionLogging_js_1.getGlobalFunctionLogging)()),
             ...(0, GlobalFunctionObservers_js_1.getGlobalFunctionObservers)(),
             ...(run?.observers ?? []),
             ...(options?.observers ?? []),
```
package/tool/executeTool.js
CHANGED
```diff
@@ -1,6 +1,8 @@
 import { nanoid as createId } from "nanoid";
 import { FunctionEventSource } from "../core/FunctionEventSource.js";
+import { getGlobalFunctionLogging } from "../core/GlobalFunctionLogging.js";
 import { getGlobalFunctionObservers } from "../core/GlobalFunctionObservers.js";
+import { getFunctionCallLogger } from "../core/getFunctionCallLogger.js";
 import { startDurationMeasurement } from "../util/DurationMeasurement.js";
 import { AbortError } from "../util/api/AbortError.js";
 import { runSafe } from "../util/runSafe.js";
@@ -50,6 +52,7 @@ async function doExecuteTool(tool, input, options) {
     const run = options?.run;
     const eventSource = new FunctionEventSource({
         observers: [
+            ...getFunctionCallLogger(options?.logging ?? getGlobalFunctionLogging()),
             ...getGlobalFunctionObservers(),
             ...(run?.observers ?? []),
             ...(options?.observers ?? []),
```
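Both `executeTool` builds now resolve logging per call: an explicit `options.logging` wins, otherwise the module-level global from `GlobalFunctionLogging` applies, and the resulting logger observers are spliced in ahead of all other observers. A self-contained sketch of that fallback; the `FunctionLogging` type, the setter name, and the example values are assumptions inferred from the module name, not confirmed by this diff:

```ts
// Assumed shape of the GlobalFunctionLogging module.
type FunctionLogging = string | undefined; // the real union of logging modes is not shown here

let globalFunctionLogging: FunctionLogging;

function setGlobalFunctionLogging(logging: FunctionLogging): void {
  globalFunctionLogging = logging;
}

function getGlobalFunctionLogging(): FunctionLogging {
  return globalFunctionLogging;
}

// Per-call resolution, mirroring `options?.logging ?? getGlobalFunctionLogging()`:
function resolveLogging(options?: { logging?: FunctionLogging }): FunctionLogging {
  return options?.logging ?? getGlobalFunctionLogging();
}

setGlobalFunctionLogging("detailed-object"); // hypothetical value
console.log(resolveLogging()); // "detailed-object"
console.log(resolveLogging({ logging: "basic-text" })); // per-call override wins
```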
package/tool/useTool.d.ts
CHANGED
```diff
@@ -1,5 +1,5 @@
 import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
-import {
+import { JsonGenerationModel, JsonGenerationModelSettings } from "../model-function/generate-json/JsonGenerationModel.js";
 import { Tool } from "./Tool.js";
 /**
  * `useTool` uses `generateJson` to generate parameters for a tool and then executes the tool with the parameters.
@@ -8,7 +8,7 @@ import { Tool } from "./Tool.js";
  * the parameters (`parameters` property, typed),
  * and the result of the tool execution (`result` property, typed).
  */
-export declare function useTool<PROMPT, RESPONSE, SETTINGS extends
+export declare function useTool<PROMPT, RESPONSE, SETTINGS extends JsonGenerationModelSettings, TOOL extends Tool<any, any, any>>(model: JsonGenerationModel<PROMPT, RESPONSE, SETTINGS>, tool: TOOL, prompt: (tool: TOOL) => PROMPT, options?: ModelFunctionOptions<SETTINGS>): Promise<{
     tool: TOOL["name"];
     parameters: TOOL["inputSchema"];
     result: Awaited<ReturnType<TOOL["execute"]>>;
```
package/tool/useToolOrGenerateText.d.ts
CHANGED
```diff
@@ -1,5 +1,5 @@
 import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
-import {
+import { JsonOrTextGenerationModel, JsonOrTextGenerationModelSettings, JsonOrTextGenerationPrompt } from "../model-function/generate-json/JsonOrTextGenerationModel.js";
 import { Tool } from "./Tool.js";
 type ToolArray<T extends Tool<any, any, any>[]> = T;
 type ToToolMap<T extends ToolArray<Tool<any, any, any>[]>> = {
@@ -14,7 +14,7 @@ type ToToolUnion<T> = {
     } : never;
 }[keyof T];
 type ToOutputValue<TOOLS extends ToolArray<Tool<any, any, any>[]>> = ToToolUnion<ToToolMap<TOOLS>>;
-export declare function useToolOrGenerateText<PROMPT, RESPONSE, SETTINGS extends
+export declare function useToolOrGenerateText<PROMPT, RESPONSE, SETTINGS extends JsonOrTextGenerationModelSettings, TOOLS extends Array<Tool<any, any, any>>>(model: JsonOrTextGenerationModel<PROMPT, RESPONSE, SETTINGS>, tools: TOOLS, prompt: (tools: TOOLS) => PROMPT & JsonOrTextGenerationPrompt<RESPONSE>, options?: ModelFunctionOptions<SETTINGS>): Promise<{
     tool: null;
     parameters: null;
     result: null;
```
package/core/ConsoleLogger.cjs
DELETED
package/core/ConsoleLogger.d.ts
DELETED
package/core/ConsoleLogger.js
DELETED
package/model-function/generate-json/GenerateJsonModel.d.ts
DELETED
```diff
@@ -1,8 +0,0 @@
-import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
-import { Model, ModelSettings } from "../Model.js";
-export interface GenerateJsonModelSettings extends ModelSettings {
-}
-export interface GenerateJsonModel<PROMPT, RESPONSE, SETTINGS extends GenerateJsonModelSettings> extends Model<SETTINGS> {
-    generateJsonResponse(prompt: PROMPT, options?: ModelFunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
-    extractJson(response: RESPONSE): unknown;
-}
```
package/model-function/generate-json/GenerateJsonOrTextModel.d.ts
DELETED
```diff
@@ -1,18 +0,0 @@
-import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
-import { Model, ModelSettings } from "../Model.js";
-export interface GenerateJsonOrTextModelSettings extends ModelSettings {
-}
-export interface GenerateJsonOrTextPrompt<RESPONSE> {
-    extractJsonAndText(response: RESPONSE): {
-        schema: null;
-        value: null;
-        text: string;
-    } | {
-        schema: string;
-        value: unknown;
-        text: string | null;
-    };
-}
-export interface GenerateJsonOrTextModel<PROMPT, RESPONSE, SETTINGS extends GenerateJsonOrTextModelSettings> extends Model<SETTINGS> {
-    generateJsonResponse(prompt: PROMPT & GenerateJsonOrTextPrompt<RESPONSE>, options?: ModelFunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
-}
```
/package/model-function/generate-json/{GenerateJsonModel.cjs → JsonGenerationModel.cjs}
RENAMED
File without changes

/package/model-function/generate-json/{GenerateJsonModel.js → JsonGenerationModel.js}
RENAMED
File without changes

/package/model-function/generate-json/{GenerateJsonOrTextModel.cjs → JsonOrTextGenerationModel.cjs}
RENAMED
File without changes

/package/model-function/generate-json/{GenerateJsonOrTextModel.js → JsonOrTextGenerationModel.js}
RENAMED
File without changes