modelfusion 0.130.1 → 0.131.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +58 -0
- package/README.md +19 -19
- package/core/api/postToApi.cjs +1 -1
- package/core/api/postToApi.js +1 -1
- package/core/schema/JsonSchemaProducer.d.ts +1 -1
- package/core/schema/Schema.d.ts +7 -7
- package/core/schema/TypeValidationError.cjs +5 -5
- package/core/schema/TypeValidationError.d.ts +4 -4
- package/core/schema/TypeValidationError.js +5 -5
- package/core/schema/UncheckedSchema.cjs +2 -2
- package/core/schema/UncheckedSchema.d.ts +5 -5
- package/core/schema/UncheckedSchema.js +2 -2
- package/core/schema/ZodSchema.cjs +5 -2
- package/core/schema/ZodSchema.d.ts +8 -8
- package/core/schema/ZodSchema.js +5 -2
- package/core/schema/parseJSON.cjs +6 -6
- package/core/schema/parseJSON.d.ts +3 -3
- package/core/schema/parseJSON.js +6 -6
- package/core/schema/validateTypes.cjs +13 -13
- package/core/schema/validateTypes.d.ts +9 -9
- package/core/schema/validateTypes.js +13 -13
- package/model-function/ModelCallEvent.d.ts +4 -4
- package/model-function/PromptTemplate.d.ts +2 -2
- package/model-function/{generate-structure/StructureFromTextGenerationModel.cjs → generate-object/ObjectFromTextGenerationModel.cjs} +8 -8
- package/model-function/{generate-structure/StructureFromTextGenerationModel.d.ts → generate-object/ObjectFromTextGenerationModel.d.ts} +6 -6
- package/model-function/{generate-structure/StructureFromTextGenerationModel.js → generate-object/ObjectFromTextGenerationModel.js} +6 -6
- package/model-function/{generate-structure/StructureFromTextPromptTemplate.d.ts → generate-object/ObjectFromTextPromptTemplate.d.ts} +4 -4
- package/model-function/{generate-structure/StructureFromTextStreamingModel.cjs → generate-object/ObjectFromTextStreamingModel.cjs} +10 -10
- package/model-function/generate-object/ObjectFromTextStreamingModel.d.ts +19 -0
- package/model-function/{generate-structure/StructureFromTextStreamingModel.js → generate-object/ObjectFromTextStreamingModel.js} +8 -8
- package/model-function/{generate-structure/StructureGenerationEvent.d.ts → generate-object/ObjectGenerationEvent.d.ts} +6 -6
- package/model-function/generate-object/ObjectGenerationModel.d.ts +24 -0
- package/model-function/{generate-structure/StructureParseError.cjs → generate-object/ObjectParseError.cjs} +5 -5
- package/model-function/{generate-structure/StructureParseError.d.ts → generate-object/ObjectParseError.d.ts} +1 -1
- package/model-function/{generate-structure/StructureParseError.js → generate-object/ObjectParseError.js} +3 -3
- package/model-function/generate-object/ObjectStream.cjs +43 -0
- package/model-function/generate-object/ObjectStream.d.ts +18 -0
- package/model-function/generate-object/ObjectStream.js +38 -0
- package/model-function/generate-object/ObjectStreamingEvent.d.ts +7 -0
- package/model-function/{generate-structure/StructureValidationError.cjs → generate-object/ObjectValidationError.cjs} +5 -5
- package/model-function/{generate-structure/StructureValidationError.d.ts → generate-object/ObjectValidationError.d.ts} +1 -1
- package/model-function/{generate-structure/StructureValidationError.js → generate-object/ObjectValidationError.js} +3 -3
- package/model-function/{generate-structure/generateStructure.cjs → generate-object/generateObject.cjs} +11 -12
- package/model-function/generate-object/generateObject.d.ts +56 -0
- package/model-function/{generate-structure/generateStructure.js → generate-object/generateObject.js} +9 -10
- package/model-function/{generate-structure → generate-object}/index.cjs +12 -11
- package/model-function/generate-object/index.d.ts +12 -0
- package/model-function/generate-object/index.js +12 -0
- package/model-function/{generate-structure/jsonStructurePrompt.cjs → generate-object/jsonObjectPrompt.cjs} +6 -6
- package/model-function/{generate-structure/jsonStructurePrompt.d.ts → generate-object/jsonObjectPrompt.d.ts} +6 -6
- package/model-function/{generate-structure/jsonStructurePrompt.js → generate-object/jsonObjectPrompt.js} +5 -5
- package/model-function/generate-object/streamObject.cjs +80 -0
- package/model-function/generate-object/streamObject.d.ts +57 -0
- package/model-function/generate-object/streamObject.js +76 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +3 -3
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +3 -3
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +3 -3
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -3
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +3 -3
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -3
- package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +1 -1
- package/model-function/index.cjs +1 -1
- package/model-function/index.d.ts +1 -1
- package/model-function/index.js +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.js +1 -1
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -1
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -1
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +1 -1
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +1 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +1 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +1 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +1 -1
- package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +5 -5
- package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +3 -3
- package/model-provider/llamacpp/LlamaCppCompletionModel.js +5 -5
- package/model-provider/mistral/MistralChatModel.cjs +1 -1
- package/model-provider/mistral/MistralChatModel.js +1 -1
- package/model-provider/ollama/OllamaChatModel.cjs +7 -7
- package/model-provider/ollama/OllamaChatModel.d.ts +3 -3
- package/model-provider/ollama/OllamaChatModel.js +7 -7
- package/model-provider/ollama/OllamaCompletionModel.cjs +7 -7
- package/model-provider/ollama/OllamaCompletionModel.d.ts +3 -3
- package/model-provider/ollama/OllamaCompletionModel.js +7 -7
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +8 -6
- package/model-provider/ollama/OllamaCompletionModel.test.js +8 -6
- package/model-provider/openai/AbstractOpenAIChatModel.cjs +1 -1
- package/model-provider/openai/AbstractOpenAIChatModel.js +1 -1
- package/model-provider/openai/AbstractOpenAICompletionModel.cjs +1 -1
- package/model-provider/openai/AbstractOpenAICompletionModel.js +1 -1
- package/model-provider/openai/{OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallObjectGenerationModel.cjs} +12 -12
- package/model-provider/openai/{OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallObjectGenerationModel.d.ts} +10 -10
- package/model-provider/openai/{OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallObjectGenerationModel.js} +10 -10
- package/model-provider/openai/OpenAIChatModel.cjs +7 -7
- package/model-provider/openai/OpenAIChatModel.d.ts +6 -6
- package/model-provider/openai/OpenAIChatModel.js +7 -7
- package/model-provider/openai/OpenAIChatModel.test.cjs +7 -5
- package/model-provider/openai/OpenAIChatModel.test.js +7 -5
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +3 -3
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +3 -3
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +3 -3
- package/package.json +1 -1
- package/tool/WebSearchTool.cjs +1 -1
- package/tool/WebSearchTool.js +1 -1
- package/tool/generate-tool-call/generateToolCall.cjs +1 -1
- package/tool/generate-tool-call/generateToolCall.js +1 -1
- package/tool/generate-tool-calls/generateToolCalls.cjs +1 -1
- package/tool/generate-tool-calls/generateToolCalls.js +1 -1
- package/{model-function/generate-structure → util}/fixJson.test.cjs +1 -1
- package/{model-function/generate-structure → util}/fixJson.test.js +1 -1
- package/util/isDeepEqualData.cjs +1 -1
- package/util/isDeepEqualData.d.ts +1 -1
- package/util/isDeepEqualData.js +1 -1
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +1 -1
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +1 -1
- package/vector-index/memory/MemoryVectorIndex.cjs +1 -1
- package/vector-index/memory/MemoryVectorIndex.js +1 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +0 -19
- package/model-function/generate-structure/StructureGenerationModel.d.ts +0 -24
- package/model-function/generate-structure/StructureStreamingEvent.d.ts +0 -7
- package/model-function/generate-structure/generateStructure.d.ts +0 -56
- package/model-function/generate-structure/index.d.ts +0 -11
- package/model-function/generate-structure/index.js +0 -11
- package/model-function/generate-structure/streamStructure.cjs +0 -61
- package/model-function/generate-structure/streamStructure.d.ts +0 -67
- package/model-function/generate-structure/streamStructure.js +0 -57
- /package/model-function/{generate-structure/StructureFromTextPromptTemplate.cjs → generate-object/ObjectFromTextPromptTemplate.cjs} +0 -0
- /package/model-function/{generate-structure/StructureFromTextPromptTemplate.js → generate-object/ObjectFromTextPromptTemplate.js} +0 -0
- /package/model-function/{generate-structure/StructureGenerationEvent.cjs → generate-object/ObjectGenerationEvent.cjs} +0 -0
- /package/model-function/{generate-structure/StructureGenerationEvent.js → generate-object/ObjectGenerationEvent.js} +0 -0
- /package/model-function/{generate-structure/StructureGenerationModel.cjs → generate-object/ObjectGenerationModel.cjs} +0 -0
- /package/model-function/{generate-structure/StructureGenerationModel.js → generate-object/ObjectGenerationModel.js} +0 -0
- /package/model-function/{generate-structure/StructureStreamingEvent.cjs → generate-object/ObjectStreamingEvent.cjs} +0 -0
- /package/model-function/{generate-structure/StructureStreamingEvent.js → generate-object/ObjectStreamingEvent.js} +0 -0
- /package/{model-function/generate-structure → util}/fixJson.cjs +0 -0
- /package/{model-function/generate-structure → util}/fixJson.d.ts +0 -0
- /package/{model-function/generate-structure → util}/fixJson.js +0 -0
- /package/{model-function/generate-structure → util}/fixJson.test.d.ts +0 -0
- /package/{model-function/generate-structure → util}/parsePartialJson.cjs +0 -0
- /package/{model-function/generate-structure → util}/parsePartialJson.d.ts +0 -0
- /package/{model-function/generate-structure → util}/parsePartialJson.js +0 -0
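
Taken together, the listing above is a package-wide rename of the "structure" generation API to "object" generation (generateStructure → generateObject, streamStructure → streamObject, StructureParseError → ObjectParseError, jsonStructurePrompt → jsonObjectPrompt, and so on), plus a move of the fixJson/parsePartialJson helpers into util. A minimal TypeScript sketch of calling the renamed API follows; the function, method, and option names come from the renamed files and the updated tests in this diff, while the root-level exports, model choice, schema, and prompt are illustrative assumptions.

import { z } from "zod";
// Root re-exports are assumed here; the individual modules are listed above.
import { generateObject, openai, zodSchema } from "modelfusion";

// generateObject replaces generateStructure (same { model, schema, prompt } shape
// as the streamObject test further down in this diff).
const character = await generateObject({
  model: openai
    .ChatTextGenerator({ model: "gpt-3.5-turbo" }) // illustrative model choice
    .asFunctionCallObjectGenerationModel({ fnName: "generateCharacter" })
    .withTextPrompt(), // accept a plain string prompt
  schema: zodSchema(z.object({ name: z.string() })),
  prompt: "generate a name",
});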

package/model-provider/openai/AbstractOpenAIChatModel.cjs
CHANGED
@@ -86,7 +86,7 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
     }
     restoreGeneratedTexts(rawResponse) {
         return this.processTextGenerationResponse((0, validateTypes_js_1.validateTypes)({
-            …
+            value: rawResponse,
             schema: (0, ZodSchema_js_1.zodSchema)(openAIChatResponseSchema),
         }));
     }

package/model-provider/openai/AbstractOpenAIChatModel.js
CHANGED
@@ -83,7 +83,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
     }
     restoreGeneratedTexts(rawResponse) {
         return this.processTextGenerationResponse(validateTypes({
-            …
+            value: rawResponse,
             schema: zodSchema(openAIChatResponseSchema),
         }));
     }

package/model-provider/openai/AbstractOpenAICompletionModel.cjs
CHANGED
@@ -75,7 +75,7 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
     }
     restoreGeneratedTexts(rawResponse) {
         return this.processTextGenerationResponse((0, validateTypes_js_1.validateTypes)({
-            …
+            value: rawResponse,
             schema: (0, ZodSchema_js_1.zodSchema)(OpenAICompletionResponseSchema),
         }));
     }

package/model-provider/openai/AbstractOpenAICompletionModel.js
CHANGED
@@ -72,7 +72,7 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
     }
     restoreGeneratedTexts(rawResponse) {
         return this.processTextGenerationResponse(validateTypes({
-            …
+            value: rawResponse,
             schema: zodSchema(OpenAICompletionResponseSchema),
         }));
     }
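
The four hunks above make the same change: the raw provider response is now handed to validateTypes under a value key. A small sketch of the new call shape, assuming validateTypes and zodSchema are re-exported from the package root as in earlier releases; the schema and input are illustrative.

import { z } from "zod";
import { validateTypes, zodSchema } from "modelfusion"; // root re-exports assumed

// 0.131.0 call shape per the + lines above: the input goes in under `value`.
const parsed = validateTypes({
  value: { text: "hello" }, // hypothetical raw API response
  schema: zodSchema(z.object({ text: z.string() })),
}); // throws a TypeValidationError when the value does not match the schema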

package/model-provider/openai/OpenAIChatFunctionCallObjectGenerationModel.cjs
CHANGED
@@ -3,13 +3,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.…
+exports.OpenAIChatFunctionCallObjectGenerationModel = void 0;
 const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
-const …
-const parsePartialJson_js_1 = require("../../…
+const ObjectParseError_js_1 = require("../../model-function/generate-object/ObjectParseError.cjs");
+const parsePartialJson_js_1 = require("../../util/parsePartialJson.cjs");
 const AbstractOpenAIChatModel_js_1 = require("./AbstractOpenAIChatModel.cjs");
 const OpenAIChatPromptTemplate_js_1 = require("./OpenAIChatPromptTemplate.cjs");
-class …
+class OpenAIChatFunctionCallObjectGenerationModel {
     constructor({ model, fnName, fnDescription, promptTemplate, }) {
         Object.defineProperty(this, "model", {
             enumerable: true,
@@ -76,7 +76,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
         return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
     }
     withPromptTemplate(promptTemplate) {
-        return new …
+        return new OpenAIChatFunctionCallObjectGenerationModel({
             model: this.model,
             fnName: this.fnName,
             fnDescription: this.fnDescription,
@@ -84,7 +84,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
         });
     }
     withSettings(additionalSettings) {
-        return new …
+        return new OpenAIChatFunctionCallObjectGenerationModel({
             model: this.model.withSettings(additionalSettings),
             fnName: this.fnName,
             fnDescription: this.fnDescription,
@@ -98,7 +98,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
      *
      * @see https://platform.openai.com/docs/guides/gpt/function-calling
      */
-    async …
+    async doGenerateObject(schema, prompt, // first argument of the function
     options) {
         const expandedPrompt = this.promptTemplate.format(prompt);
         const rawResponse = await this.model
@@ -129,13 +129,13 @@ class OpenAIChatFunctionCallStructureGenerationModel {
             };
         }
         catch (error) {
-            throw new …
+            throw new ObjectParseError_js_1.ObjectParseError({
                 valueText,
                 cause: error,
             });
         }
     }
-    async …
+    async doStreamObject(schema, prompt, // first argument of the function
     options) {
         const expandedPrompt = this.promptTemplate.format(prompt);
         return this.model.callAPI(expandedPrompt, options, {
@@ -150,7 +150,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
             ],
         });
     }
-    …
+    extractObjectTextDelta(delta) {
         const chunk = delta;
         if (chunk.object !== "chat.completion.chunk") {
             return undefined;
@@ -162,8 +162,8 @@ class OpenAIChatFunctionCallStructureGenerationModel {
         }
         return firstChoice.delta.function_call?.arguments;
     }
-    …
+    parseAccumulatedObjectText(accumulatedText) {
         return (0, parsePartialJson_js_1.parsePartialJson)(accumulatedText);
     }
 }
-exports.…
+exports.OpenAIChatFunctionCallObjectGenerationModel = OpenAIChatFunctionCallObjectGenerationModel;

package/model-provider/openai/OpenAIChatFunctionCallObjectGenerationModel.d.ts
CHANGED
@@ -1,11 +1,11 @@
 import { FunctionCallOptions } from "../../core/FunctionOptions.js";
 import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
 import { Schema } from "../../core/schema/Schema.js";
-import { …
+import { ObjectStreamingModel } from "../../model-function/generate-object/ObjectGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { OpenAIChatPrompt } from "./AbstractOpenAIChatModel.js";
 import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel.js";
-export declare class …
+export declare class OpenAIChatFunctionCallObjectGenerationModel<PROMPT_TEMPLATE extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>> implements ObjectStreamingModel<Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
 OpenAIChatSettings> {
     readonly model: OpenAIChatModel;
     readonly fnName: string;
@@ -107,16 +107,16 @@ OpenAIChatSettings> {
     /**
      * Returns this model with a text prompt template.
      */
-    withTextPrompt(): …
+    withTextPrompt(): OpenAIChatFunctionCallObjectGenerationModel<TextGenerationPromptTemplate<string, OpenAIChatPrompt>>;
     /**
      * Returns this model with an instruction prompt template.
      */
-    withInstructionPrompt(): …
+    withInstructionPrompt(): OpenAIChatFunctionCallObjectGenerationModel<TextGenerationPromptTemplate<import("../../index.js").InstructionPrompt, OpenAIChatPrompt>>;
     /**
      * Returns this model with a chat prompt template.
      */
-    withChatPrompt(): …
-    withPromptTemplate<TARGET_PROMPT_FORMAT extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>>(promptTemplate: TARGET_PROMPT_FORMAT): …
+    withChatPrompt(): OpenAIChatFunctionCallObjectGenerationModel<TextGenerationPromptTemplate<import("../../index.js").ChatPrompt, OpenAIChatPrompt>>;
+    withPromptTemplate<TARGET_PROMPT_FORMAT extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>>(promptTemplate: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallObjectGenerationModel<TARGET_PROMPT_FORMAT>;
     withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
     /**
      * JSON generation uses the OpenAI GPT function calling API.
@@ -125,7 +125,7 @@ OpenAIChatSettings> {
      *
      * @see https://platform.openai.com/docs/guides/gpt/function-calling
      */
-    …
+    doGenerateObject(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
     options: FunctionCallOptions): Promise<{
         rawResponse: {
             object: "chat.completion";
@@ -168,7 +168,7 @@ OpenAIChatSettings> {
             totalTokens: number;
         };
     }>;
-    …
+    doStreamObject(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
     options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
         object: string;
         id: string;
@@ -196,6 +196,6 @@ OpenAIChatSettings> {
         model?: string | undefined;
         system_fingerprint?: string | null | undefined;
     }>>>;
-    …
-    …
+    extractObjectTextDelta(delta: unknown): string | undefined;
+    parseAccumulatedObjectText(accumulatedText: string): unknown;
 }

package/model-provider/openai/OpenAIChatFunctionCallObjectGenerationModel.js
CHANGED
@@ -1,9 +1,9 @@
 import SecureJSON from "secure-json-parse";
-import { …
-import { parsePartialJson } from "../../…
+import { ObjectParseError } from "../../model-function/generate-object/ObjectParseError.js";
+import { parsePartialJson } from "../../util/parsePartialJson.js";
 import { OpenAIChatResponseFormat, } from "./AbstractOpenAIChatModel.js";
 import { chat, instruction, text } from "./OpenAIChatPromptTemplate.js";
-export class …
+export class OpenAIChatFunctionCallObjectGenerationModel {
     constructor({ model, fnName, fnDescription, promptTemplate, }) {
         Object.defineProperty(this, "model", {
             enumerable: true,
@@ -70,7 +70,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
         return this.withPromptTemplate(chat());
     }
     withPromptTemplate(promptTemplate) {
-        return new …
+        return new OpenAIChatFunctionCallObjectGenerationModel({
             model: this.model,
             fnName: this.fnName,
             fnDescription: this.fnDescription,
@@ -78,7 +78,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
         });
     }
     withSettings(additionalSettings) {
-        return new …
+        return new OpenAIChatFunctionCallObjectGenerationModel({
            model: this.model.withSettings(additionalSettings),
             fnName: this.fnName,
             fnDescription: this.fnDescription,
@@ -92,7 +92,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
      *
      * @see https://platform.openai.com/docs/guides/gpt/function-calling
      */
-    async …
+    async doGenerateObject(schema, prompt, // first argument of the function
     options) {
         const expandedPrompt = this.promptTemplate.format(prompt);
         const rawResponse = await this.model
@@ -123,13 +123,13 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
             };
         }
         catch (error) {
-            throw new …
+            throw new ObjectParseError({
                 valueText,
                 cause: error,
             });
         }
     }
-    async …
+    async doStreamObject(schema, prompt, // first argument of the function
     options) {
         const expandedPrompt = this.promptTemplate.format(prompt);
         return this.model.callAPI(expandedPrompt, options, {
@@ -144,7 +144,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
             ],
         });
     }
-    …
+    extractObjectTextDelta(delta) {
         const chunk = delta;
         if (chunk.object !== "chat.completion.chunk") {
             return undefined;
@@ -156,7 +156,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
         }
         return firstChoice.delta.function_call?.arguments;
     }
-    …
+    parseAccumulatedObjectText(accumulatedText) {
         return parsePartialJson(accumulatedText);
     }
 }

package/model-provider/openai/OpenAIChatModel.cjs
CHANGED
@@ -1,11 +1,11 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAIChatModel = exports.calculateOpenAIChatCostInMillicents = exports.isOpenAIChatModel = exports.getOpenAIChatModelInformation = exports.OPENAI_CHAT_MODELS = void 0;
-const …
+const ObjectFromTextStreamingModel_js_1 = require("../../model-function/generate-object/ObjectFromTextStreamingModel.cjs");
 const PromptTemplateFullTextModel_js_1 = require("../../model-function/generate-text/PromptTemplateFullTextModel.cjs");
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const AbstractOpenAIChatModel_js_1 = require("./AbstractOpenAIChatModel.cjs");
-const …
+const OpenAIChatFunctionCallObjectGenerationModel_js_1 = require("./OpenAIChatFunctionCallObjectGenerationModel.cjs");
 const OpenAIChatPromptTemplate_js_1 = require("./OpenAIChatPromptTemplate.cjs");
 const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
 const countOpenAIChatMessageTokens_js_1 = require("./countOpenAIChatMessageTokens.cjs");
@@ -210,21 +210,21 @@ class OpenAIChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpenAIChatMod
         ];
         return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
     }
-    …
-        return new …
+    asFunctionCallObjectGenerationModel({ fnName, fnDescription, }) {
+        return new OpenAIChatFunctionCallObjectGenerationModel_js_1.OpenAIChatFunctionCallObjectGenerationModel({
             model: this,
             fnName,
             fnDescription,
             promptTemplate: (0, OpenAIChatPromptTemplate_js_1.identity)(),
         });
     }
-    …
+    asObjectGenerationModel(promptTemplate) {
         return "adaptModel" in promptTemplate
-            ? new …
+            ? new ObjectFromTextStreamingModel_js_1.ObjectFromTextStreamingModel({
                 model: promptTemplate.adaptModel(this),
                 template: promptTemplate,
             })
-            : new …
+            : new ObjectFromTextStreamingModel_js_1.ObjectFromTextStreamingModel({
                 model: this,
                 template: promptTemplate,
             });

package/model-provider/openai/OpenAIChatModel.d.ts
CHANGED
@@ -1,12 +1,12 @@
-import { …
-import { …
+import { FlexibleObjectFromTextPromptTemplate, ObjectFromTextPromptTemplate } from "../../model-function/generate-object/ObjectFromTextPromptTemplate.js";
+import { ObjectFromTextStreamingModel } from "../../model-function/generate-object/ObjectFromTextStreamingModel.js";
 import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
 import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
 import { ToolCallsGenerationModel } from "../../tool/generate-tool-calls/ToolCallsGenerationModel.js";
 import { AbstractOpenAIChatModel, AbstractOpenAIChatSettings, OpenAIChatPrompt, OpenAIChatResponse } from "./AbstractOpenAIChatModel.js";
-import { …
+import { OpenAIChatFunctionCallObjectGenerationModel } from "./OpenAIChatFunctionCallObjectGenerationModel.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
 export declare const OPENAI_CHAT_MODELS: {
     "gpt-4": {
@@ -136,11 +136,11 @@ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatS
      */
     countPromptTokens(messages: OpenAIChatPrompt): Promise<number>;
     get settingsForEvent(): Partial<OpenAIChatSettings>;
-    …
+    asFunctionCallObjectGenerationModel({ fnName, fnDescription, }: {
         fnName: string;
         fnDescription?: string;
-    }): …
-    …
+    }): OpenAIChatFunctionCallObjectGenerationModel<TextGenerationPromptTemplate<OpenAIChatPrompt, OpenAIChatPrompt>>;
+    asObjectGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: ObjectFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleObjectFromTextPromptTemplate<INPUT_PROMPT, unknown>): ObjectFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | ObjectFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
     withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAIChatSettings, this>;
     withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
     withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;

package/model-provider/openai/OpenAIChatModel.js
CHANGED
@@ -1,8 +1,8 @@
-import { …
+import { ObjectFromTextStreamingModel } from "../../model-function/generate-object/ObjectFromTextStreamingModel.js";
 import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { AbstractOpenAIChatModel, } from "./AbstractOpenAIChatModel.js";
-import { …
+import { OpenAIChatFunctionCallObjectGenerationModel } from "./OpenAIChatFunctionCallObjectGenerationModel.js";
 import { chat, identity, instruction, text, } from "./OpenAIChatPromptTemplate.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
 import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
@@ -204,21 +204,21 @@ export class OpenAIChatModel extends AbstractOpenAIChatModel {
         ];
         return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
     }
-    …
-        return new …
+    asFunctionCallObjectGenerationModel({ fnName, fnDescription, }) {
+        return new OpenAIChatFunctionCallObjectGenerationModel({
             model: this,
             fnName,
             fnDescription,
             promptTemplate: identity(),
         });
     }
-    …
+    asObjectGenerationModel(promptTemplate) {
         return "adaptModel" in promptTemplate
-            ? new …
+            ? new ObjectFromTextStreamingModel({
                 model: promptTemplate.adaptModel(this),
                 template: promptTemplate,
             })
-            : new …
+            : new ObjectFromTextStreamingModel({
                 model: this,
                 template: promptTemplate,
             });
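
For chat models used without the function-calling route, the asObjectGenerationModel(promptTemplate) method added above wraps the model in an ObjectFromTextStreamingModel. A sketch of the intended call, assuming jsonObjectPrompt (the renamed jsonStructurePrompt from the listing) still exposes a text() template and that these names are re-exported from the package root:

import { z } from "zod";
import { generateObject, jsonObjectPrompt, openai, zodSchema } from "modelfusion"; // exports assumed

// Text-prompt route: instruct the model to emit JSON and parse it into an object.
const character = await generateObject({
  model: openai
    .ChatTextGenerator({ model: "gpt-3.5-turbo" }) // illustrative model choice
    .asObjectGenerationModel(jsonObjectPrompt.text()), // template name assumed from the renamed file
  schema: zodSchema(z.object({ name: z.string() })),
  prompt: "generate a name",
});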

package/model-provider/openai/OpenAIChatModel.test.cjs
CHANGED
@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 const zod_1 = require("zod");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
-const …
+const streamObject_js_1 = require("../../model-function/generate-object/streamObject.cjs");
 const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
 const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
 const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
@@ -38,7 +38,7 @@ describe("streamText", () => {
         expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual(["A"]);
     });
 });
-describe("…
+describe("streamObject", () => {
     const server = new StreamingTestServer_js_1.StreamingTestServer("https://api.openai.com/v1/chat/completions");
     server.setupTestEnvironment();
     it("should return a text stream", async () => {
@@ -76,12 +76,12 @@ describe("streamStructure", () => {
            `"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}\n\n`,
             `data: [DONE]\n\n`,
         ];
-        const stream = await (0, …
+        const stream = await (0, streamObject_js_1.streamObject)({
             model: new OpenAIChatModel_js_1.OpenAIChatModel({
                 api: new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration({ apiKey: "test-key" }),
                 model: "gpt-3.5-turbo",
             })
-                .…
+                .asFunctionCallObjectGenerationModel({
                 fnName: "generateCharacter",
                 fnDescription: "Generate character descriptions.",
             })
@@ -89,11 +89,13 @@ describe("streamStructure", () => {
             schema: (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({ name: zod_1.z.string() })),
             prompt: "generate a name",
         });
-        …
+        const streamAsArray = await (0, arrayFromAsync_js_1.arrayFromAsync)(stream);
+        expect(streamAsArray.map((entry) => entry.partialObject)).toStrictEqual([
            {},
             { name: "" },
             { name: "M" },
             { name: "Mike" },
+            { name: "Mike" }, // double occurrence on purpose (stream text)
         ]);
     });
 });

package/model-provider/openai/OpenAIChatModel.test.js
CHANGED
@@ -1,6 +1,6 @@
 import { z } from "zod";
 import { zodSchema } from "../../core/schema/ZodSchema.js";
-import { …
+import { streamObject } from "../../model-function/generate-object/streamObject.js";
 import { streamText } from "../../model-function/generate-text/streamText.js";
 import { StreamingTestServer } from "../../test/StreamingTestServer.js";
 import { arrayFromAsync } from "../../test/arrayFromAsync.js";
@@ -36,7 +36,7 @@ describe("streamText", () => {
         expect(await arrayFromAsync(stream)).toStrictEqual(["A"]);
     });
 });
-describe("…
+describe("streamObject", () => {
     const server = new StreamingTestServer("https://api.openai.com/v1/chat/completions");
     server.setupTestEnvironment();
     it("should return a text stream", async () => {
@@ -74,12 +74,12 @@ describe("streamStructure", () => {
            `"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}\n\n`,
             `data: [DONE]\n\n`,
         ];
-        const stream = await …
+        const stream = await streamObject({
             model: new OpenAIChatModel({
                 api: new OpenAIApiConfiguration({ apiKey: "test-key" }),
                 model: "gpt-3.5-turbo",
             })
-                .…
+                .asFunctionCallObjectGenerationModel({
                 fnName: "generateCharacter",
                 fnDescription: "Generate character descriptions.",
             })
@@ -87,11 +87,13 @@ describe("streamStructure", () => {
             schema: zodSchema(z.object({ name: z.string() })),
             prompt: "generate a name",
         });
-        …
+        const streamAsArray = await arrayFromAsync(stream);
+        expect(streamAsArray.map((entry) => entry.partialObject)).toStrictEqual([
            {},
             { name: "" },
             { name: "M" },
             { name: "Mike" },
+            { name: "Mike" }, // double occurrence on purpose (stream text)
         ]);
     });
 });
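
The updated tests above show the consumer-facing shape of streamObject: each entry of the returned stream carries a partialObject, and the final value may repeat when a later text delta does not change the parsed object (the "double occurrence on purpose" entry). A consumer-side sketch under those assumptions; the root exports, model, schema, and prompt are illustrative.

import { z } from "zod";
import { openai, streamObject, zodSchema } from "modelfusion"; // root exports assumed

const objectStream = await streamObject({
  model: openai
    .ChatTextGenerator({ model: "gpt-3.5-turbo" })
    .asFunctionCallObjectGenerationModel({ fnName: "generateCharacter" })
    .withTextPrompt(),
  schema: zodSchema(z.object({ name: z.string() })),
  prompt: "generate a name",
});

for await (const { partialObject } of objectStream) {
  // Entries can repeat when a delta leaves the parsed value unchanged
  // (the test expects { name: "Mike" } twice), so deduplicate if needed.
  console.log(partialObject);
}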

package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAICompatibleChatModel = void 0;
-const …
+const ObjectFromTextStreamingModel_js_1 = require("../../model-function/generate-object/ObjectFromTextStreamingModel.cjs");
 const PromptTemplateFullTextModel_js_1 = require("../../model-function/generate-text/PromptTemplateFullTextModel.cjs");
 const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
 const AbstractOpenAIChatModel_js_1 = require("../openai/AbstractOpenAIChatModel.cjs");
@@ -57,13 +57,13 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
         ];
         return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
     }
-    …
+    asObjectGenerationModel(promptTemplate) {
         return "adaptModel" in promptTemplate
-            ? new …
+            ? new ObjectFromTextStreamingModel_js_1.ObjectFromTextStreamingModel({
                 model: promptTemplate.adaptModel(this),
                 template: promptTemplate,
             })
-            : new …
+            : new ObjectFromTextStreamingModel_js_1.ObjectFromTextStreamingModel({
                 model: this,
                 template: promptTemplate,
             });

package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { …
-import { …
+import { FlexibleObjectFromTextPromptTemplate, ObjectFromTextPromptTemplate } from "../../model-function/generate-object/ObjectFromTextPromptTemplate.js";
+import { ObjectFromTextStreamingModel } from "../../model-function/generate-object/ObjectFromTextStreamingModel.js";
 import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
 import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
@@ -27,7 +27,7 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
     readonly tokenizer: undefined;
     readonly countPromptTokens: undefined;
     get settingsForEvent(): Partial<OpenAICompatibleChatSettings>;
-    …
+    asObjectGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: ObjectFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleObjectFromTextPromptTemplate<INPUT_PROMPT, unknown>): ObjectFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | ObjectFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
     withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
     withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
     withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;

package/model-provider/openai-compatible/OpenAICompatibleChatModel.js
CHANGED
@@ -1,4 +1,4 @@
-import { …
+import { ObjectFromTextStreamingModel } from "../../model-function/generate-object/ObjectFromTextStreamingModel.js";
 import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
 import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
 import { AbstractOpenAIChatModel, } from "../openai/AbstractOpenAIChatModel.js";
@@ -54,13 +54,13 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
         ];
         return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
     }
-    …
+    asObjectGenerationModel(promptTemplate) {
         return "adaptModel" in promptTemplate
-            ? new …
+            ? new ObjectFromTextStreamingModel({
                 model: promptTemplate.adaptModel(this),
                 template: promptTemplate,
             })
-            : new …
+            : new ObjectFromTextStreamingModel({
                 model: this,
                 template: promptTemplate,
             });

package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs
CHANGED
@@ -100,9 +100,9 @@ const successfulResponseHandler = async ({ response, url, requestBodyValues }) =
            requestBodyValues,
         });
     }
-    if ("error" in parsedResult.…
+    if ("error" in parsedResult.value) {
         throw new ApiCallError_js_1.ApiCallError({
-            message: parsedResult.…
+            message: parsedResult.value.error,
             statusCode: response.status,
             responseBody,
             url,
@@ -110,7 +110,7 @@ const successfulResponseHandler = async ({ response, url, requestBodyValues }) =
         });
     }
     return {
-        text: parsedResult.…
+        text: parsedResult.value.text.trim(),
     };
 };
 const failedResponseHandler = async ({ response, url, requestBodyValues, }) => {

package/model-provider/whispercpp/WhisperCppTranscriptionModel.js
CHANGED
@@ -96,9 +96,9 @@ const successfulResponseHandler = async ({ response, url, requestBodyValues }) =
            requestBodyValues,
         });
     }
-    if ("error" in parsedResult.…
+    if ("error" in parsedResult.value) {
         throw new ApiCallError({
-            message: parsedResult.…
+            message: parsedResult.value.error,
             statusCode: response.status,
             responseBody,
             url,
@@ -106,7 +106,7 @@ const successfulResponseHandler = async ({ response, url, requestBodyValues }) =
         });
     }
     return {
-        text: parsedResult.…
+        text: parsedResult.value.text.trim(),
     };
 };
 const failedResponseHandler = async ({ response, url, requestBodyValues, }) => {
package/package.json
CHANGED
package/tool/WebSearchTool.cjs
CHANGED
@@ -13,7 +13,7 @@ const RETURN_TYPE_SCHEMA = (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({
 }));
 // expose the schemas to library consumers:
 const createParameters = (description) => 
-// same …
+// same schema, but with description:
 (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({
     query: zod_1.z.string().describe(description),
 }));
package/tool/WebSearchTool.js
CHANGED
@@ -10,7 +10,7 @@ const RETURN_TYPE_SCHEMA = zodSchema(z.object({
 }));
 // expose the schemas to library consumers:
 const createParameters = (description) => 
-// same …
+// same schema, but with description:
 zodSchema(z.object({
     query: z.string().describe(description),
 }));