modelfusion 0.69.0 → 0.71.0
This diff shows the changes between publicly released versions of the package, as published to the supported registries. It is provided for informational purposes only.
- package/README.md +26 -13
- package/model-function/ModelCallEvent.d.ts +3 -2
- package/model-function/generate-text/index.cjs +1 -8
- package/model-function/generate-text/index.d.ts +1 -8
- package/model-function/generate-text/index.js +1 -8
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.cjs +31 -3
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.d.ts +29 -1
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.js +29 -1
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.cjs +79 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +31 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.js +74 -0
- package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +28 -23
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.cjs +17 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.d.ts +8 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.js +13 -0
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.cjs +41 -27
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +20 -2
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.js +38 -24
- package/model-function/generate-text/prompt-format/TextPromptFormat.cjs +27 -30
- package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +7 -5
- package/model-function/generate-text/prompt-format/TextPromptFormat.js +24 -27
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.cjs +21 -29
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.js +19 -27
- package/model-function/generate-text/prompt-format/index.cjs +39 -0
- package/model-function/generate-text/prompt-format/index.d.ts +10 -0
- package/model-function/generate-text/prompt-format/index.js +10 -0
- package/model-function/generate-text/prompt-format/trimChatPrompt.cjs +17 -22
- package/model-function/generate-text/prompt-format/trimChatPrompt.js +17 -22
- package/model-function/generate-text/prompt-format/validateChatPrompt.cjs +12 -24
- package/model-function/generate-text/prompt-format/validateChatPrompt.d.ts +0 -3
- package/model-function/generate-text/prompt-format/validateChatPrompt.js +10 -21
- package/model-function/generate-tool-call/NoSuchToolDefinitionError.cjs +41 -0
- package/model-function/generate-tool-call/NoSuchToolDefinitionError.d.ts +17 -0
- package/model-function/generate-tool-call/NoSuchToolDefinitionError.js +37 -0
- package/model-function/generate-tool-call/ToolCall.d.ts +5 -0
- package/model-function/generate-tool-call/ToolCallGenerationModel.d.ts +3 -3
- package/model-function/generate-tool-call/ToolCallParametersValidationError.cjs +1 -1
- package/model-function/generate-tool-call/ToolCallParametersValidationError.js +1 -1
- package/model-function/generate-tool-call/ToolCallsOrTextGenerationEvent.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallsOrTextGenerationEvent.d.ts +23 -0
- package/model-function/generate-tool-call/ToolCallsOrTextGenerationEvent.js +1 -0
- package/model-function/generate-tool-call/ToolCallsOrTextGenerationModel.cjs +2 -0
- package/model-function/generate-tool-call/ToolCallsOrTextGenerationModel.d.ts +21 -0
- package/model-function/generate-tool-call/ToolCallsOrTextGenerationModel.js +1 -0
- package/model-function/generate-tool-call/ToolDefinition.cjs +2 -0
- package/model-function/generate-tool-call/{ToolCallDefinition.d.ts → ToolDefinition.d.ts} +1 -1
- package/model-function/generate-tool-call/ToolDefinition.js +1 -0
- package/model-function/generate-tool-call/generateToolCall.cjs +2 -1
- package/model-function/generate-tool-call/generateToolCall.d.ts +6 -11
- package/model-function/generate-tool-call/generateToolCall.js +2 -1
- package/model-function/generate-tool-call/generateToolCallsOrText.cjs +63 -0
- package/model-function/generate-tool-call/generateToolCallsOrText.d.ts +33 -0
- package/model-function/generate-tool-call/generateToolCallsOrText.js +59 -0
- package/model-function/generate-tool-call/index.cjs +7 -2
- package/model-function/generate-tool-call/index.d.ts +7 -2
- package/model-function/generate-tool-call/index.js +7 -2
- package/model-provider/anthropic/AnthropicPromptFormat.cjs +22 -26
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +4 -2
- package/model-provider/anthropic/AnthropicPromptFormat.js +19 -23
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +2 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +3 -3
- package/model-provider/anthropic/index.cjs +14 -2
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.js +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.cjs → LlamaCppBakLLaVA1Format.cjs} +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.d.ts → LlamaCppBakLLaVA1Format.d.ts} +2 -2
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.js → LlamaCppBakLLaVA1Format.js} +2 -2
- package/model-provider/llamacpp/index.cjs +14 -2
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/openai/OpenAICompletionModel.cjs +4 -4
- package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.js +5 -5
- package/model-provider/openai/chat/OpenAIChatMessage.d.ts +4 -1
- package/model-provider/openai/chat/OpenAIChatModel.cjs +29 -3
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +63 -16
- package/model-provider/openai/chat/OpenAIChatModel.js +30 -4
- package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +22 -34
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatPromptFormat.js +19 -31
- package/model-provider/openai/index.cjs +14 -2
- package/model-provider/openai/index.d.ts +1 -1
- package/model-provider/openai/index.js +1 -1
- package/package.json +2 -2
- package/tool/Tool.cjs +1 -1
- package/tool/Tool.d.ts +1 -1
- package/tool/Tool.js +1 -1
- /package/model-function/generate-tool-call/{ToolCallDefinition.cjs → ToolCall.cjs} +0 -0
- /package/model-function/generate-tool-call/{ToolCallDefinition.js → ToolCall.js} +0 -0
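The headline changes in this range are a reworked chat prompt structure and first-class support for the OpenAI tool-calls API (`ToolCallsOrTextGenerationModel`, `generateToolCallsOrText`), plus the `ToolCallDefinition` → `ToolDefinition` rename. Judging from the rewritten `chat()` prompt formatters in the hunks below, a `ChatPrompt` is now a `system` string plus a `{ role, content }` message list, replacing the earlier `user`/`ai`-keyed messages. A minimal sketch of that shape; the exact exported type in `ChatPrompt.d.ts` may differ:

```ts
// Sketch of the new ChatPrompt shape implied by the rewritten chat()
// formatters below (chatPrompt.system plus a role/content message list).
// Field names are taken from the diff; this is not the verbatim declaration.
type ChatPrompt = {
  system?: string;
  messages: Array<{ role: "user" | "assistant"; content: string }>;
};

const chatPrompt: ChatPrompt = {
  system: "You are a helpful assistant.",
  messages: [
    { role: "user", content: "Write a haiku about diffs." },
    { role: "assistant", content: "Lines added, removed." },
    { role: "user", content: "Another one, please." },
  ],
};
```

Structurally invalid prompts now surface through the new `ChatPromptValidationError` raised by `validateChatPrompt` (see the new files in the list above).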
package/model-provider/openai/chat/OpenAIChatModel.cjs
CHANGED

```diff
@@ -383,7 +383,7 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
         const toolCalls = response.choices[0]?.message.tool_calls;
         return {
             response,
-
+            toolCall: toolCalls == null || toolCalls.length === 0
                 ? null
                 : {
                     id: toolCalls[0].id,
@@ -392,6 +392,32 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
             usage: this.extractUsage(response),
         };
     }
+    async doGenerateToolCallsOrText(tools, prompt, options) {
+        const response = await this.callAPI(prompt, {
+            ...options,
+            responseFormat: exports.OpenAIChatResponseFormat.json,
+            toolChoice: "auto",
+            tools: tools.map((tool) => ({
+                type: "function",
+                function: {
+                    name: tool.name,
+                    description: tool.description,
+                    parameters: tool.parameters.getJsonSchema(),
+                },
+            })),
+        });
+        const message = response.choices[0]?.message;
+        return {
+            response,
+            text: message.content ?? null,
+            toolCalls: message.tool_calls?.map((toolCall) => ({
+                id: toolCall.id,
+                name: toolCall.function.name,
+                parameters: (0, parseJSON_js_1.parseJSON)({ text: toolCall.function.arguments }),
+            })) ?? null,
+            usage: this.extractUsage(response),
+        };
+    }
     extractUsage(response) {
         return {
             promptTokens: response.usage.prompt_tokens,
@@ -403,13 +429,13 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
      * Returns this model with an instruction prompt format.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.
+        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
     }
     /**
      * Returns this model with a chat prompt format.
      */
     withChatPrompt() {
-        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.
+        return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
```
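The new `doGenerateToolCallsOrText` wires the OpenAI `tools`/`tool_calls` API into modelfusion. A hedged usage sketch via the new `generateToolCallsOrText` helper from the file list above, assuming it and `ToolDefinition` are re-exported from the package root and that the helper resolves to the `text`/`toolCalls` pair; the tools and the model name are placeholders for illustration:

```ts
import {
  generateToolCallsOrText,
  OpenAIChatMessage,
  OpenAIChatModel,
  ToolDefinition,
} from "modelfusion";

// Placeholder tools: any ToolDefinition-compatible objects (for example,
// Tool instances) would work here.
declare const calculatorTool: ToolDefinition<"calculator", unknown>;
declare const weatherTool: ToolDefinition<"weather", unknown>;

const { text, toolCalls } = await generateToolCallsOrText(
  new OpenAIChatModel({ model: "gpt-4-1106-preview" }), // model name for illustration
  [calculatorTool, weatherTool],
  [OpenAIChatMessage.user("What is 7 * 8, and is it raining in Berlin?")]
);

// Mirrors the return shape in the hunk above: the model either answers in
// text or requests tool calls; the unused field comes back as null.
if (toolCalls != null) {
  for (const toolCall of toolCalls) {
    console.log(toolCall.name, toolCall.parameters);
  }
} else {
  console.log(text);
}
```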
package/model-provider/openai/chat/OpenAIChatModel.d.ts
CHANGED

```diff
@@ -10,8 +10,9 @@ import { StructureOrTextGenerationModel } from "../../../model-function/generate
 import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
-import { ToolCallDefinition } from "../../../model-function/generate-tool-call/ToolCallDefinition.js";
 import { ToolCallGenerationModel } from "../../../model-function/generate-tool-call/ToolCallGenerationModel.js";
+import { ToolCallsOrTextGenerationModel } from "../../../model-function/generate-tool-call/ToolCallsOrTextGenerationModel.js";
+import { ToolDefinition } from "../../../model-function/generate-tool-call/ToolDefinition.js";
 import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
 import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 export declare const OPENAI_CHAT_MODELS: {
@@ -168,7 +169,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, Omit<Op
  * ),
  * ]);
  */
-export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
+export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallsOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
     constructor(settings: OpenAIChatSettings);
     readonly provider: "openai";
     get modelName(): OpenAIChatModelType;
@@ -200,8 +201,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -249,8 +250,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -292,8 +293,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -337,8 +338,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -371,7 +372,7 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
             totalTokens: number;
         };
     }>;
-    doGenerateToolCall(tool:
+    doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
         response: {
             object: "chat.completion";
             usage: {
@@ -383,8 +384,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -405,7 +406,7 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         created: number;
         system_fingerprint?: string | undefined;
     };
-
+    toolCall: {
         id: string;
         parameters: unknown;
     } | null;
@@ -415,6 +416,52 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
             totalTokens: number;
         };
     }>;
+    doGenerateToolCallsOrText(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
+        response: {
+            object: "chat.completion";
+            usage: {
+                prompt_tokens: number;
+                total_tokens: number;
+                completion_tokens: number;
+            };
+            model: string;
+            id: string;
+            choices: {
+                message: {
+                    role: "assistant";
+                    content: string | null;
+                    function_call?: {
+                        name: string;
+                        arguments: string;
+                    } | undefined;
+                    tool_calls?: {
+                        function: {
+                            name: string;
+                            arguments: string;
+                        };
+                        type: "function";
+                        id: string;
+                    }[] | undefined;
+                };
+                index: number;
+                logprobs?: any;
+                finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+            }[];
+            created: number;
+            system_fingerprint?: string | undefined;
+        };
+        text: string | null;
+        toolCalls: {
+            id: string;
+            name: string;
+            parameters: unknown;
+        }[] | null;
+        usage: {
+            promptTokens: number;
+            completionTokens: number;
+            totalTokens: number;
+        };
+    }>;
     extractUsage(response: OpenAIChatResponse): {
         promptTokens: number;
         completionTokens: number;
@@ -476,8 +523,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         id: string;
     }>, "many">>;
 }, "strip", z.ZodTypeAny, {
-    content: string | null;
     role: "assistant";
+    content: string | null;
     function_call?: {
         name: string;
         arguments: string;
@@ -491,8 +538,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         id: string;
     }[] | undefined;
 }, {
-    content: string | null;
     role: "assistant";
+    content: string | null;
     function_call?: {
         name: string;
         arguments: string;
@@ -511,8 +558,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     finish_reason: z.ZodNullable<z.ZodOptional<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
 }, "strip", z.ZodTypeAny, {
     message: {
-        content: string | null;
         role: "assistant";
+        content: string | null;
         function_call?: {
             name: string;
             arguments: string;
@@ -531,8 +578,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }, {
     message: {
-        content: string | null;
         role: "assistant";
+        content: string | null;
         function_call?: {
             name: string;
             arguments: string;
@@ -578,8 +625,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     id: string;
     choices: {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
@@ -610,8 +657,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     id: string;
     choices: {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
@@ -654,8 +701,8 @@ export declare const OpenAIChatResponseFormat: {
     id: string;
     choices: {
         message: {
-            content: string | null;
             role: "assistant";
+            content: string | null;
             function_call?: {
                 name: string;
                 arguments: string;
```
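The `ToolCallDefinition` → `ToolDefinition` rename runs through all of these declarations. From its usage in `doGenerateToolCallsOrText` above (`tool.name`, `tool.description`, `tool.parameters.getJsonSchema()`), the interface is roughly the following; the generic parameter names echo the `ToolDefinition<string, unknown>` references in the hunks, the rest is inferred rather than copied from `ToolDefinition.d.ts`:

```ts
// Rough shape inferred from usage in the hunks above; not the verbatim
// declaration. The real Schema interface in modelfusion has validation
// methods as well (an assumption; only getJsonSchema() appears in the diff).
interface Schema<T> {
  getJsonSchema(): unknown; // emits JSON Schema for the OpenAI tools API
  _type?: T; // placeholder to anchor the type parameter in this sketch
}

interface ToolDefinition<NAME extends string, PARAMETERS> {
  name: NAME;
  description?: string;
  parameters: Schema<PARAMETERS>;
}
```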
package/model-provider/openai/chat/OpenAIChatModel.js
CHANGED

```diff
@@ -10,7 +10,7 @@ import { PromptFormatTextStreamingModel } from "../../../model-function/generate
 import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
 import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
 import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
-import {
+import { chat, instruction } from "./OpenAIChatPromptFormat.js";
 import { createOpenAIChatDeltaIterableQueue } from "./OpenAIChatStreamIterable.js";
 import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
 /*
@@ -374,7 +374,7 @@ export class OpenAIChatModel extends AbstractModel {
         const toolCalls = response.choices[0]?.message.tool_calls;
         return {
             response,
-
+            toolCall: toolCalls == null || toolCalls.length === 0
                 ? null
                 : {
                     id: toolCalls[0].id,
@@ -383,6 +383,32 @@ export class OpenAIChatModel extends AbstractModel {
             usage: this.extractUsage(response),
         };
     }
+    async doGenerateToolCallsOrText(tools, prompt, options) {
+        const response = await this.callAPI(prompt, {
+            ...options,
+            responseFormat: OpenAIChatResponseFormat.json,
+            toolChoice: "auto",
+            tools: tools.map((tool) => ({
+                type: "function",
+                function: {
+                    name: tool.name,
+                    description: tool.description,
+                    parameters: tool.parameters.getJsonSchema(),
+                },
+            })),
+        });
+        const message = response.choices[0]?.message;
+        return {
+            response,
+            text: message.content ?? null,
+            toolCalls: message.tool_calls?.map((toolCall) => ({
+                id: toolCall.id,
+                name: toolCall.function.name,
+                parameters: parseJSON({ text: toolCall.function.arguments }),
+            })) ?? null,
+            usage: this.extractUsage(response),
+        };
+    }
     extractUsage(response) {
         return {
             promptTokens: response.usage.prompt_tokens,
@@ -394,13 +420,13 @@ export class OpenAIChatModel extends AbstractModel {
      * Returns this model with an instruction prompt format.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat(
+        return this.withPromptFormat(instruction());
     }
     /**
     * Returns this model with a chat prompt format.
     */
     withChatPrompt() {
-        return this.withPromptFormat(
+        return this.withPromptFormat(chat());
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextStreamingModel({
```
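One detail worth noting in both builds: the OpenAI API returns `function.arguments` as a JSON-encoded string, not an object, which is why each tool call is run through `parseJSON` before being surfaced as `parameters`. Stripped down, the step is equivalent to the following; modelfusion presumably layers safer parsing and typed errors on top (an assumption, not shown in this diff):

```ts
// toolCall.function.arguments arrives as a JSON string from the OpenAI API.
const rawArguments = '{"a": 7, "b": 8}'; // e.g. toolCall.function.arguments
const parameters: unknown = JSON.parse(rawArguments);
```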
package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs
CHANGED

```diff
@@ -1,12 +1,12 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.chat = exports.instruction = void 0;
 const validateChatPrompt_js_1 = require("../../../model-function/generate-text/prompt-format/validateChatPrompt.cjs");
 const OpenAIChatMessage_js_1 = require("./OpenAIChatMessage.cjs");
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-function
+function instruction() {
     return {
         format: (instruction) => {
             const messages = [];
@@ -24,49 +24,37 @@ function mapInstructionPromptToOpenAIChatFormat() {
         stopSequences: [],
     };
 }
-exports.
+exports.instruction = instruction;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-function
+function chat() {
     return {
         format: (chatPrompt) => {
             (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
             const messages = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    role:
-
-                });
-                continue;
-            }
-            // ai message:
-            if ("ai" in message) {
-                messages.push({
-                    role: "assistant",
-                    content: message.ai,
-                });
-                continue;
+            if (chatPrompt.system != null) {
+                messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.system(chatPrompt.system));
+            }
+            for (const { role, content } of chatPrompt.messages) {
+                switch (role) {
+                    case "user": {
+                        messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.user(content));
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.assistant(content));
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
             }
-            // unsupported message:
-            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
         }
             return messages;
         },
         stopSequences: [],
     };
 }
-exports.
+exports.chat = chat;
```
package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts
CHANGED

```diff
@@ -5,8 +5,8 @@ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-export declare function
+export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export declare function
+export declare function chat(): TextGenerationPromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
```
package/model-provider/openai/chat/OpenAIChatPromptFormat.js
CHANGED

```diff
@@ -3,7 +3,7 @@ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-export function
+export function instruction() {
     return {
         format: (instruction) => {
             const messages = [];
@@ -24,41 +24,29 @@ export function mapInstructionPromptToOpenAIChatFormat() {
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export function
+export function chat() {
     return {
         format: (chatPrompt) => {
             validateChatPrompt(chatPrompt);
             const messages = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    role:
-
-                });
-                continue;
-            }
-            // ai message:
-            if ("ai" in message) {
-                messages.push({
-                    role: "assistant",
-                    content: message.ai,
-                });
-                continue;
+            if (chatPrompt.system != null) {
+                messages.push(OpenAIChatMessage.system(chatPrompt.system));
+            }
+            for (const { role, content } of chatPrompt.messages) {
+                switch (role) {
+                    case "user": {
+                        messages.push(OpenAIChatMessage.user(content));
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push(OpenAIChatMessage.assistant(content));
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
             }
-            // unsupported message:
-            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
         }
             return messages;
         },
```
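The `default` branch with `_exhaustiveCheck` is TypeScript's exhaustiveness-check idiom: in the original `.ts` source, `role` narrows to `never` once `user` and `assistant` are handled, so adding a new role to `ChatPrompt` fails to compile here instead of failing at runtime. In isolation:

```ts
type Role = "user" | "assistant";

function handleRole(role: Role): string {
  switch (role) {
    case "user":
      return "user message";
    case "assistant":
      return "assistant message";
    default: {
      // `role` has type `never` here; adding a new Role member makes this
      // assignment a compile-time error, pointing straight at the switch.
      const _exhaustiveCheck: never = role;
      throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
    }
  }
}
```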
package/model-provider/openai/index.cjs
CHANGED

```diff
@@ -10,11 +10,23 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.OpenAIError = void 0;
+exports.OpenAIChatPromptFormat = exports.OpenAIError = void 0;
 __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAICompletionModel.cjs"), exports);
@@ -28,5 +40,5 @@ __exportStar(require("./OpenAITranscriptionModel.cjs"), exports);
 __exportStar(require("./TikTokenTokenizer.cjs"), exports);
 __exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
 __exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
-
+exports.OpenAIChatPromptFormat = __importStar(require("./chat/OpenAIChatPromptFormat.cjs"));
 __exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
```
package/model-provider/openai/index.d.ts
CHANGED

```diff
@@ -10,6 +10,6 @@ export * from "./OpenAITranscriptionModel.js";
 export * from "./TikTokenTokenizer.js";
 export * from "./chat/OpenAIChatMessage.js";
 export * from "./chat/OpenAIChatModel.js";
-export * from "./chat/OpenAIChatPromptFormat.js";
+export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
 export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
 export * from "./chat/countOpenAIChatMessageTokens.js";
```
package/model-provider/openai/index.js
CHANGED

```diff
@@ -10,5 +10,5 @@ export * from "./OpenAITranscriptionModel.js";
 export * from "./TikTokenTokenizer.js";
 export * from "./chat/OpenAIChatMessage.js";
 export * from "./chat/OpenAIChatModel.js";
-export * from "./chat/OpenAIChatPromptFormat.js";
+export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
 export * from "./chat/countOpenAIChatMessageTokens.js";
```
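With `OpenAIChatPromptFormat` now re-exported as a namespace, the prompt format functions lose their long `mapInstructionPromptToOpenAIChatFormat`-style names and are reached through the namespace instead. A sketch of the new call site; the convenience wrappers `withInstructionPrompt()`/`withChatPrompt()` from the model hunks above remain equivalent:

```ts
import { OpenAIChatModel, OpenAIChatPromptFormat } from "modelfusion";

// Attach the chat prompt format via the namespaced short name.
const model = new OpenAIChatModel({ model: "gpt-3.5-turbo" }).withPromptFormat(
  OpenAIChatPromptFormat.chat()
);

// Equivalent shortcut:
// new OpenAIChatModel({ model: "gpt-3.5-turbo" }).withChatPrompt();
```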
package/package.json
CHANGED

```diff
@@ -1,7 +1,7 @@
 {
   "name": "modelfusion",
-  "description": "
-  "version": "0.
+  "description": "The TypeScript library for building multi-modal AI applications.",
+  "version": "0.71.0",
   "author": "Lars Grammel",
   "license": "MIT",
   "keywords": [
```
package/tool/Tool.cjs
CHANGED

```diff
@@ -71,7 +71,7 @@ class Tool {
         this.execute = execute;
     }
     /**
-     *
+     * Provides a structure definition with the name, description and schema of the input.
      * This is used by `useTool`.
      */
     get inputStructureDefinition() {
```
package/tool/Tool.d.ts
CHANGED

```diff
@@ -38,7 +38,7 @@ export declare class Tool<NAME extends string, INPUT, OUTPUT> {
         execute(input: INPUT, options?: FunctionOptions): PromiseLike<OUTPUT>;
     });
     /**
-     *
+     * Provides a structure definition with the name, description and schema of the input.
      * This is used by `useTool`.
      */
     get inputStructureDefinition(): StructureDefinition<NAME, INPUT>;
```
package/tool/Tool.js
CHANGED

```diff
@@ -68,7 +68,7 @@ export class Tool {
         this.execute = execute;
     }
     /**
-     *
+     * Provides a structure definition with the name, description and schema of the input.
      * This is used by `useTool`.
      */
     get inputStructureDefinition() {
```
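For context on the restored doc comment: `inputStructureDefinition` exposes a tool's name, description, and input schema as a `StructureDefinition`, which `useTool` (and now the tool-call generation path) consume. A hedged sketch of a matching tool; constructor field names such as `inputSchema` and the `ZodSchema` wrapper are assumptions, not confirmed by this diff:

```ts
import { Tool, ZodSchema } from "modelfusion"; // ZodSchema export is assumed
import { z } from "zod";

// Hypothetical tool: name, description, input schema, and an execute function,
// matching the Tool.d.ts excerpt above.
const multiply = new Tool({
  name: "multiply",
  description: "Multiply two numbers.",
  inputSchema: new ZodSchema(z.object({ a: z.number(), b: z.number() })),
  execute: async ({ a, b }) => a * b,
});

// multiply.inputStructureDefinition bundles name + description + schema.
```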