modelfusion 0.68.1 → 0.70.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -9
- package/core/schema/JSONParseError.cjs +1 -1
- package/core/schema/JSONParseError.d.ts +1 -1
- package/core/schema/JSONParseError.js +1 -1
- package/model-function/ModelCallEvent.d.ts +3 -2
- package/model-function/generate-structure/StructureValidationError.cjs +11 -0
- package/model-function/generate-structure/StructureValidationError.d.ts +9 -0
- package/model-function/generate-structure/StructureValidationError.js +11 -0
- package/model-function/generate-text/index.cjs +1 -8
- package/model-function/generate-text/index.d.ts +1 -8
- package/model-function/generate-text/index.js +1 -8
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.cjs +31 -3
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.d.ts +29 -1
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.js +29 -1
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.cjs +79 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +31 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.js +74 -0
- package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +28 -23
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.cjs +17 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.d.ts +8 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.js +13 -0
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.cjs +41 -27
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +20 -2
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.js +38 -24
- package/model-function/generate-text/prompt-format/TextPromptFormat.cjs +27 -30
- package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +7 -5
- package/model-function/generate-text/prompt-format/TextPromptFormat.js +24 -27
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.cjs +21 -29
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.js +19 -27
- package/model-function/generate-text/prompt-format/index.cjs +39 -0
- package/model-function/generate-text/prompt-format/index.d.ts +10 -0
- package/model-function/generate-text/prompt-format/index.js +10 -0
- package/model-function/generate-text/prompt-format/trimChatPrompt.cjs +17 -22
- package/model-function/generate-text/prompt-format/trimChatPrompt.js +17 -22
- package/model-function/generate-text/prompt-format/validateChatPrompt.cjs +12 -24
- package/model-function/generate-text/prompt-format/validateChatPrompt.d.ts +0 -3
- package/model-function/generate-text/prompt-format/validateChatPrompt.js +10 -21
- package/model-function/generate-tool-call/ToolCallDefinition.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallDefinition.d.ts +7 -0
- package/model-function/generate-tool-call/ToolCallDefinition.js +1 -0
- package/model-function/generate-tool-call/ToolCallGenerationError.cjs +35 -0
- package/model-function/generate-tool-call/ToolCallGenerationError.d.ts +15 -0
- package/model-function/generate-tool-call/ToolCallGenerationError.js +31 -0
- package/model-function/generate-tool-call/ToolCallGenerationEvent.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallGenerationEvent.d.ts +23 -0
- package/model-function/generate-tool-call/ToolCallGenerationEvent.js +1 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.d.ts +19 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.js +1 -0
- package/model-function/generate-tool-call/ToolCallParametersValidationError.cjs +44 -0
- package/model-function/generate-tool-call/ToolCallParametersValidationError.d.ts +18 -0
- package/model-function/generate-tool-call/ToolCallParametersValidationError.js +40 -0
- package/model-function/generate-tool-call/generateToolCall.cjs +58 -0
- package/model-function/generate-tool-call/generateToolCall.d.ts +20 -0
- package/model-function/generate-tool-call/generateToolCall.js +54 -0
- package/model-function/generate-tool-call/index.cjs +21 -0
- package/model-function/generate-tool-call/index.d.ts +5 -0
- package/model-function/generate-tool-call/index.js +5 -0
- package/model-function/index.cjs +1 -0
- package/model-function/index.d.ts +1 -0
- package/model-function/index.js +1 -0
- package/model-provider/anthropic/AnthropicPromptFormat.cjs +22 -26
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +4 -2
- package/model-provider/anthropic/AnthropicPromptFormat.js +19 -23
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +2 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +3 -3
- package/model-provider/anthropic/index.cjs +14 -2
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.js +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.cjs → LlamaCppBakLLaVA1Format.cjs} +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.d.ts → LlamaCppBakLLaVA1Format.d.ts} +2 -2
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.js → LlamaCppBakLLaVA1Format.js} +2 -2
- package/model-provider/llamacpp/index.cjs +14 -2
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/openai/OpenAICompletionModel.cjs +4 -4
- package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.js +5 -5
- package/model-provider/openai/chat/OpenAIChatModel.cjs +50 -4
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +192 -20
- package/model-provider/openai/chat/OpenAIChatModel.js +51 -5
- package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +22 -34
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatPromptFormat.js +19 -31
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +10 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +10 -0
- package/model-provider/openai/index.cjs +14 -2
- package/model-provider/openai/index.d.ts +1 -1
- package/model-provider/openai/index.js +1 -1
- package/package.json +3 -3
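
The central addition in this release is a new `generate-tool-call` model function (`ToolCallDefinition`, `ToolCallGenerationModel`, `generateToolCall`) with OpenAI `tools`/`tool_choice` support wired into `OpenAIChatModel`. A minimal usage sketch follows; the `generateToolCall(model, tool, prompt)` call shape and the `ZodSchema` wrapper are assumptions inferred from the `doGenerateToolCall(tool, prompt, options)` method and the `tool.parameters.getJsonSchema()` call in the hunks below, not confirmed by this diff:

```ts
import { z } from "zod";
// Assumed imports; the diff confirms these modules exist but not their exact export names.
import { generateToolCall, OpenAIChatModel, OpenAIChatMessage, ZodSchema } from "modelfusion";

const toolCall = await generateToolCall(
  new OpenAIChatModel({ model: "gpt-3.5-turbo-1106" }), // any tools-capable chat model
  {
    // Matches the ToolCallDefinition fields used by doGenerateToolCall below:
    name: "getWeather", // hypothetical tool
    description: "Get the current weather for a city.",
    parameters: new ZodSchema(z.object({ city: z.string() })), // must expose getJsonSchema()
  },
  [OpenAIChatMessage.user("What's the weather in Berlin?")]
);
// Per doGenerateToolCall's return type: a { id, parameters } object, or null if the
// model produced no tool call (assuming the wrapper unwraps the `value` field).
```

The hunks shown below cover `OpenAIChatModel` (.cjs/.d.ts/.js) and `OpenAIChatPromptFormat.cjs`.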
package/model-provider/openai/chat/OpenAIChatModel.cjs

```diff
@@ -8,6 +8,7 @@ const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../../core/api/postToApi.cjs");
+const parseJSON_js_1 = require("../../../core/schema/parseJSON.cjs");
 const AbstractModel_js_1 = require("../../../model-function/AbstractModel.cjs");
 const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
 const parsePartialJson_js_1 = require("../../../model-function/generate-structure/parsePartialJson.cjs");
```
```diff
@@ -210,9 +211,11 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
             throttle: this.settings.api?.throttle,
             call: async () => callOpenAIChatCompletionAPI({
                 ...this.settings,
-                // function calling:
+                // function & tool calling:
                 functions: options.functions ?? this.settings.functions,
                 functionCall: options.functionCall ?? this.settings.functionCall,
+                tools: options.tools ?? this.settings.tools,
+                toolChoice: options.toolChoice ?? this.settings.toolChoice,
                 // map to OpenAI API names:
                 stop: this.settings.stopSequences,
                 maxTokens: this.settings.maxCompletionTokens,
```
```diff
@@ -358,6 +361,37 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
             });
         }
     }
+    async doGenerateToolCall(tool, prompt, options) {
+        const response = await this.callAPI(prompt, {
+            ...options,
+            responseFormat: exports.OpenAIChatResponseFormat.json,
+            toolChoice: {
+                type: "function",
+                function: { name: tool.name },
+            },
+            tools: [
+                {
+                    type: "function",
+                    function: {
+                        name: tool.name,
+                        description: tool.description,
+                        parameters: tool.parameters.getJsonSchema(),
+                    },
+                },
+            ],
+        });
+        const toolCalls = response.choices[0]?.message.tool_calls;
+        return {
+            response,
+            value: toolCalls == null || toolCalls.length === 0
+                ? null
+                : {
+                    id: toolCalls[0].id,
+                    parameters: (0, parseJSON_js_1.parseJSON)({ text: toolCalls[0].function.arguments }),
+                },
+            usage: this.extractUsage(response),
+        };
+    }
     extractUsage(response) {
         return {
             promptTokens: response.usage.prompt_tokens,
```
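
`doGenerateToolCall` forces the model to call the given tool by naming it in `toolChoice`. Combined with the snake_case mapping added to `callOpenAIChatCompletionAPI` further down, the request sent to the chat completions endpoint would carry roughly this payload (a sketch implied by the code above, not taken verbatim from the package; values illustrative, usual sampling settings omitted):

```ts
// Wire-level body suggested by doGenerateToolCall plus the tools/tool_choice mapping below.
const body = {
  model: "gpt-3.5-turbo-1106",
  messages: [{ role: "user", content: "What's the weather in Berlin?" }],
  tool_choice: { type: "function", function: { name: "getWeather" } }, // force this tool
  tools: [
    {
      type: "function",
      function: {
        name: "getWeather", // hypothetical tool
        description: "Get the current weather for a city.",
        parameters: { type: "object", properties: { city: { type: "string" } } }, // from getJsonSchema()
      },
    },
  ],
};
```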
```diff
@@ -369,13 +403,13 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
      * Returns this model with an instruction prompt format.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.mapInstructionPromptToOpenAIChatFormat)());
+        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
     }
     /**
      * Returns this model with a chat prompt format.
      */
     withChatPrompt() {
-        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.mapChatPromptToOpenAIChatFormat)());
+        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
```
```diff
@@ -405,6 +439,16 @@ const openAIChatResponseSchema = zod_1.z.object({
                 arguments: zod_1.z.string(),
             })
                 .optional(),
+            tool_calls: zod_1.z
+                .array(zod_1.z.object({
+                id: zod_1.z.string(),
+                type: zod_1.z.literal("function"),
+                function: zod_1.z.object({
+                    name: zod_1.z.string(),
+                    arguments: zod_1.z.string(),
+                }),
+            }))
+                .optional(),
         }),
         index: zod_1.z.number(),
         logprobs: zod_1.z.nullable(zod_1.z.any()),
```
```diff
@@ -429,7 +473,7 @@ const openAIChatResponseSchema = zod_1.z.object({
         total_tokens: zod_1.z.number(),
     }),
 });
-async function callOpenAIChatCompletionAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, responseFormat, model, messages, functions, functionCall, temperature, topP, n, stop, maxTokens, presencePenalty, frequencyPenalty, logitBias, user, openAIResponseFormat, seed, }) {
+async function callOpenAIChatCompletionAPI({ api = new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration(), abortSignal, responseFormat, model, messages, functions, functionCall, tools, toolChoice, temperature, topP, n, stop, maxTokens, presencePenalty, frequencyPenalty, logitBias, user, openAIResponseFormat, seed, }) {
     // empty arrays are not allowed for stop:
     if (stop != null && Array.isArray(stop) && stop.length === 0) {
         stop = undefined;
```
```diff
@@ -443,6 +487,8 @@ async function callOpenAIChatCompletionAPI({ api = new OpenAIApiConfiguration_js
         messages,
         functions,
         function_call: functionCall,
+        tools,
+        tool_choice: toolChoice,
         temperature,
         top_p: topP,
         n,
```
package/model-provider/openai/chat/OpenAIChatModel.d.ts

```diff
@@ -10,6 +10,8 @@ import { StructureOrTextGenerationModel } from "../../../model-function/generate
 import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
+import { ToolCallDefinition } from "../../../model-function/generate-tool-call/ToolCallDefinition.js";
+import { ToolCallGenerationModel } from "../../../model-function/generate-tool-call/ToolCallGenerationModel.js";
 import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
 import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 export declare const OPENAI_CHAT_MODELS: {
```
```diff
@@ -117,6 +119,20 @@ export interface OpenAIChatCallSettings {
     functionCall?: "none" | "auto" | {
         name: string;
     };
+    tools?: Array<{
+        type: "function";
+        function: {
+            name: string;
+            description?: string;
+            parameters: unknown;
+        };
+    }>;
+    toolChoice?: "none" | "auto" | {
+        type: "function";
+        function: {
+            name: string;
+        };
+    };
     stop?: string | string[];
     maxTokens?: number;
     temperature?: number;
```
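
On the settings side, `tools` mirrors the OpenAI function-tool array and `toolChoice` accepts `"none"`, `"auto"`, or a forced function reference; both are camelCase here and renamed to `tools`/`tool_choice` on the wire. Illustrative values conforming to the interface above (the tool itself is hypothetical):

```ts
const callSettings = {
  toolChoice: "auto" as const, // or "none", or { type: "function", function: { name: "getWeather" } }
  tools: [
    {
      type: "function" as const,
      function: {
        name: "getWeather",
        description: "Get the current weather for a city.",
        parameters: { type: "object", properties: { city: { type: "string" } } }, // JSON Schema, typed as unknown
      },
    },
  ],
};
```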
```diff
@@ -152,7 +168,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, Omit<Op
  *   ),
  * ]);
  */
-export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
+export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
     constructor(settings: OpenAIChatSettings);
     readonly provider: "openai";
     get modelName(): OpenAIChatModelType;
```
```diff
@@ -166,14 +182,10 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
     callAPI<RESULT>(messages: Array<OpenAIChatMessage>, options: {
         responseFormat: OpenAIChatResponseFormatType<RESULT>;
     } & FunctionOptions & {
-        functions?: Array<{
-            name: string;
-            description?: string;
-            parameters: unknown;
-        }>;
-        functionCall?: "none" | "auto" | {
-            name: string;
-        };
+        functions?: OpenAIChatCallSettings["functions"];
+        functionCall?: OpenAIChatCallSettings["functionCall"];
+        tools?: OpenAIChatCallSettings["tools"];
+        toolChoice?: OpenAIChatCallSettings["toolChoice"];
     }): Promise<RESULT>;
     get settingsForEvent(): Partial<OpenAIChatSettings>;
     doGenerateText(prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
```
```diff
@@ -188,12 +200,20 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
                 } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
             };
             index: number;
             logprobs?: any;
@@ -229,12 +249,20 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
                 } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
             };
             index: number;
             logprobs?: any;
@@ -264,12 +292,20 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
                 } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
             };
             index: number;
             logprobs?: any;
@@ -301,12 +337,20 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
                 } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
             };
             index: number;
             logprobs?: any;
```
```diff
@@ -327,6 +371,50 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
             totalTokens: number;
         };
     }>;
+    doGenerateToolCall(tool: ToolCallDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
+        response: {
+            object: "chat.completion";
+            usage: {
+                prompt_tokens: number;
+                total_tokens: number;
+                completion_tokens: number;
+            };
+            model: string;
+            id: string;
+            choices: {
+                message: {
+                    role: "assistant";
+                    content: string | null;
+                    function_call?: {
+                        name: string;
+                        arguments: string;
+                    } | undefined;
+                    tool_calls?: {
+                        function: {
+                            name: string;
+                            arguments: string;
+                        };
+                        type: "function";
+                        id: string;
+                    }[] | undefined;
+                };
+                index: number;
+                logprobs?: any;
+                finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+            }[];
+            created: number;
+            system_fingerprint?: string | undefined;
+        };
+        value: {
+            id: string;
+            parameters: unknown;
+        } | null;
+        usage: {
+            promptTokens: number;
+            completionTokens: number;
+            totalTokens: number;
+        };
+    }>;
     extractUsage(response: OpenAIChatResponse): {
         promptTokens: number;
         completionTokens: number;
```
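
Per this declaration, the resolved `value` is `null` when the model returns no tool call, and `parameters` comes back as `unknown` (raw parsed JSON); the new `ToolCallParametersValidationError` in the file list suggests validation happens in the `generateToolCall` wrapper. A consuming sketch, assuming `tool` and `messages` are defined as in the earlier example:

```ts
const { value, usage } = await new OpenAIChatModel({ model: "gpt-3.5-turbo-1106" })
  .doGenerateToolCall(tool, messages);

if (value === null) {
  throw new Error("The model did not produce a tool call.");
}
console.log(value.id, value.parameters, usage.totalTokens);
```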
```diff
@@ -359,44 +447,104 @@ declare const openAIChatResponseSchema: z.ZodObject<{
                 name: string;
                 arguments: string;
             }>>;
+            tool_calls: z.ZodOptional<z.ZodArray<z.ZodObject<{
+                id: z.ZodString;
+                type: z.ZodLiteral<"function">;
+                function: z.ZodObject<{
+                    name: z.ZodString;
+                    arguments: z.ZodString;
+                }, "strip", z.ZodTypeAny, {
+                    name: string;
+                    arguments: string;
+                }, {
+                    name: string;
+                    arguments: string;
+                }>;
+            }, "strip", z.ZodTypeAny, {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }, {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }>, "many">>;
         }, "strip", z.ZodTypeAny, {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         }, {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         }>;
         index: z.ZodNumber;
         logprobs: z.ZodNullable<z.ZodAny>;
         finish_reason: z.ZodNullable<z.ZodOptional<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
     }, "strip", z.ZodTypeAny, {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         };
         index: number;
         logprobs?: any;
         finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
     }, {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         };
         index: number;
         logprobs?: any;
```
```diff
@@ -430,12 +578,20 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     id: string;
     choices: {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         };
         index: number;
         logprobs?: any;
@@ -454,12 +610,20 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     id: string;
     choices: {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         };
         index: number;
         logprobs?: any;
@@ -490,12 +654,20 @@ export declare const OpenAIChatResponseFormat: {
     id: string;
     choices: {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
             } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
         };
         index: number;
         logprobs?: any;
```
package/model-provider/openai/chat/OpenAIChatModel.js

```diff
@@ -2,6 +2,7 @@ import SecureJSON from "secure-json-parse";
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../../core/api/postToApi.js";
+import { parseJSON } from "../../../core/schema/parseJSON.js";
 import { AbstractModel } from "../../../model-function/AbstractModel.js";
 import { StructureParseError } from "../../../model-function/generate-structure/StructureParseError.js";
 import { parsePartialJson } from "../../../model-function/generate-structure/parsePartialJson.js";
```
```diff
@@ -9,7 +10,7 @@ import { PromptFormatTextStreamingModel } from "../../../model-function/generate
 import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
 import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
 import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
-import { mapChatPromptToOpenAIChatFormat, mapInstructionPromptToOpenAIChatFormat } from "./OpenAIChatPromptFormat.js";
+import { chat, instruction } from "./OpenAIChatPromptFormat.js";
 import { createOpenAIChatDeltaIterableQueue } from "./OpenAIChatStreamIterable.js";
 import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
 /*
```
```diff
@@ -201,9 +202,11 @@ export class OpenAIChatModel extends AbstractModel {
             throttle: this.settings.api?.throttle,
             call: async () => callOpenAIChatCompletionAPI({
                 ...this.settings,
-                // function calling:
+                // function & tool calling:
                 functions: options.functions ?? this.settings.functions,
                 functionCall: options.functionCall ?? this.settings.functionCall,
+                tools: options.tools ?? this.settings.tools,
+                toolChoice: options.toolChoice ?? this.settings.toolChoice,
                 // map to OpenAI API names:
                 stop: this.settings.stopSequences,
                 maxTokens: this.settings.maxCompletionTokens,
```
```diff
@@ -349,6 +352,37 @@ export class OpenAIChatModel extends AbstractModel {
             });
         }
     }
+    async doGenerateToolCall(tool, prompt, options) {
+        const response = await this.callAPI(prompt, {
+            ...options,
+            responseFormat: OpenAIChatResponseFormat.json,
+            toolChoice: {
+                type: "function",
+                function: { name: tool.name },
+            },
+            tools: [
+                {
+                    type: "function",
+                    function: {
+                        name: tool.name,
+                        description: tool.description,
+                        parameters: tool.parameters.getJsonSchema(),
+                    },
+                },
+            ],
+        });
+        const toolCalls = response.choices[0]?.message.tool_calls;
+        return {
+            response,
+            value: toolCalls == null || toolCalls.length === 0
+                ? null
+                : {
+                    id: toolCalls[0].id,
+                    parameters: parseJSON({ text: toolCalls[0].function.arguments }),
+                },
+            usage: this.extractUsage(response),
+        };
+    }
     extractUsage(response) {
         return {
             promptTokens: response.usage.prompt_tokens,
```
```diff
@@ -360,13 +394,13 @@ export class OpenAIChatModel extends AbstractModel {
      * Returns this model with an instruction prompt format.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat(mapInstructionPromptToOpenAIChatFormat());
+        return this.withPromptFormat(instruction());
     }
     /**
      * Returns this model with a chat prompt format.
      */
     withChatPrompt() {
-        return this.withPromptFormat(mapChatPromptToOpenAIChatFormat());
+        return this.withPromptFormat(chat());
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextStreamingModel({
```
```diff
@@ -395,6 +429,16 @@ const openAIChatResponseSchema = z.object({
                 arguments: z.string(),
             })
                 .optional(),
+            tool_calls: z
+                .array(z.object({
+                id: z.string(),
+                type: z.literal("function"),
+                function: z.object({
+                    name: z.string(),
+                    arguments: z.string(),
+                }),
+            }))
+                .optional(),
         }),
         index: z.number(),
         logprobs: z.nullable(z.any()),
```
```diff
@@ -419,7 +463,7 @@ const openAIChatResponseSchema = z.object({
         total_tokens: z.number(),
     }),
 });
-async function callOpenAIChatCompletionAPI({ api = new OpenAIApiConfiguration(), abortSignal, responseFormat, model, messages, functions, functionCall, temperature, topP, n, stop, maxTokens, presencePenalty, frequencyPenalty, logitBias, user, openAIResponseFormat, seed, }) {
+async function callOpenAIChatCompletionAPI({ api = new OpenAIApiConfiguration(), abortSignal, responseFormat, model, messages, functions, functionCall, tools, toolChoice, temperature, topP, n, stop, maxTokens, presencePenalty, frequencyPenalty, logitBias, user, openAIResponseFormat, seed, }) {
     // empty arrays are not allowed for stop:
     if (stop != null && Array.isArray(stop) && stop.length === 0) {
         stop = undefined;
```
```diff
@@ -433,6 +477,8 @@ async function callOpenAIChatCompletionAPI({ api = new OpenAIApiConfiguration(),
         messages,
         functions,
         function_call: functionCall,
+        tools,
+        tool_choice: toolChoice,
         temperature,
         top_p: topP,
         n,
```
package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs

```diff
@@ -1,12 +1,12 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.mapChatPromptToOpenAIChatFormat = exports.mapInstructionPromptToOpenAIChatFormat = void 0;
+exports.chat = exports.instruction = void 0;
 const validateChatPrompt_js_1 = require("../../../model-function/generate-text/prompt-format/validateChatPrompt.cjs");
 const OpenAIChatMessage_js_1 = require("./OpenAIChatMessage.cjs");
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-function mapInstructionPromptToOpenAIChatFormat() {
+function instruction() {
     return {
         format: (instruction) => {
             const messages = [];
```
```diff
@@ -24,49 +24,37 @@ function mapInstructionPromptToOpenAIChatFormat() {
         stopSequences: [],
     };
 }
-exports.mapInstructionPromptToOpenAIChatFormat = mapInstructionPromptToOpenAIChatFormat;
+exports.instruction = instruction;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-function mapChatPromptToOpenAIChatFormat() {
+function chat() {
     return {
         format: (chatPrompt) => {
             (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
             const messages = [];
-            …
-                    role: …
-                });
-                continue;
-            }
-            // ai message:
-            if ("ai" in message) {
-                messages.push({
-                    role: "assistant",
-                    content: message.ai,
-                });
-                continue;
+            if (chatPrompt.system != null) {
+                messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.system(chatPrompt.system));
+            }
+            for (const { role, content } of chatPrompt.messages) {
+                switch (role) {
+                    case "user": {
+                        messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.user(content));
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.assistant(content));
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
+                }
             }
-            // unsupported message:
-            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
             }
             return messages;
         },
         stopSequences: [],
     };
 }
-exports.mapChatPromptToOpenAIChatFormat = mapChatPromptToOpenAIChatFormat;
+exports.chat = chat;
```