modelfusion 0.69.0 → 0.70.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -9
- package/model-function/generate-text/index.cjs +1 -8
- package/model-function/generate-text/index.d.ts +1 -8
- package/model-function/generate-text/index.js +1 -8
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.cjs +31 -3
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.d.ts +29 -1
- package/model-function/generate-text/prompt-format/AlpacaPromptFormat.js +29 -1
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.cjs +79 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +31 -0
- package/model-function/generate-text/prompt-format/ChatMLPromptFormat.js +74 -0
- package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +28 -23
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.cjs +17 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.d.ts +8 -0
- package/model-function/generate-text/prompt-format/ChatPromptValidationError.js +13 -0
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.cjs +41 -27
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +20 -2
- package/model-function/generate-text/prompt-format/Llama2PromptFormat.js +38 -24
- package/model-function/generate-text/prompt-format/TextPromptFormat.cjs +27 -30
- package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +7 -5
- package/model-function/generate-text/prompt-format/TextPromptFormat.js +24 -27
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.cjs +21 -29
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +2 -2
- package/model-function/generate-text/prompt-format/VicunaPromptFormat.js +19 -27
- package/model-function/generate-text/prompt-format/index.cjs +39 -0
- package/model-function/generate-text/prompt-format/index.d.ts +10 -0
- package/model-function/generate-text/prompt-format/index.js +10 -0
- package/model-function/generate-text/prompt-format/trimChatPrompt.cjs +17 -22
- package/model-function/generate-text/prompt-format/trimChatPrompt.js +17 -22
- package/model-function/generate-text/prompt-format/validateChatPrompt.cjs +12 -24
- package/model-function/generate-text/prompt-format/validateChatPrompt.d.ts +0 -3
- package/model-function/generate-text/prompt-format/validateChatPrompt.js +10 -21
- package/model-provider/anthropic/AnthropicPromptFormat.cjs +22 -26
- package/model-provider/anthropic/AnthropicPromptFormat.d.ts +4 -2
- package/model-provider/anthropic/AnthropicPromptFormat.js +19 -23
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +2 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +3 -3
- package/model-provider/anthropic/index.cjs +14 -2
- package/model-provider/anthropic/index.d.ts +1 -1
- package/model-provider/anthropic/index.js +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.js +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.cjs → LlamaCppBakLLaVA1Format.cjs} +4 -4
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.d.ts → LlamaCppBakLLaVA1Format.d.ts} +2 -2
- package/model-provider/llamacpp/{mapInstructionPromptToBakLLaVA1ForLlamaCppFormat.js → LlamaCppBakLLaVA1Format.js} +2 -2
- package/model-provider/llamacpp/index.cjs +14 -2
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/openai/OpenAICompletionModel.cjs +4 -4
- package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.js +5 -5
- package/model-provider/openai/chat/OpenAIChatModel.cjs +2 -2
- package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
- package/model-provider/openai/chat/OpenAIChatModel.js +3 -3
- package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +22 -34
- package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +2 -2
- package/model-provider/openai/chat/OpenAIChatPromptFormat.js +19 -31
- package/model-provider/openai/index.cjs +14 -2
- package/model-provider/openai/index.d.ts +1 -1
- package/model-provider/openai/index.js +1 -1
- package/package.json +1 -1
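Most of these changes follow one rename: the verbose `mapInstructionPromptTo…Format` / `mapChatPromptTo…Format` helpers become short `instruction()` and `chat()` factories, re-exported as per-provider namespaces (`OpenAIChatPromptFormat`, and by the look of the anthropic/llamacpp index changes, the same pattern elsewhere). A minimal before/after sketch of the call style, assuming the usual top-level re-exports from `modelfusion` (the model settings are illustrative, not part of this diff):

```ts
import { OpenAIChatModel, OpenAIChatPromptFormat } from "modelfusion";

// 0.69.0: model.withPromptFormat(mapInstructionPromptToOpenAIChatFormat())
// 0.70.0: the renamed factory lives on the new namespace export
const model = new OpenAIChatModel({ model: "gpt-3.5-turbo" }) // settings illustrative
  .withPromptFormat(OpenAIChatPromptFormat.instruction());
```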
package/model-provider/openai/chat/OpenAIChatModel.d.ts
CHANGED
```diff
@@ -200,8 +200,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -249,8 +249,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -292,8 +292,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -337,8 +337,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -383,8 +383,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -476,8 +476,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         id: string;
     }>, "many">>;
 }, "strip", z.ZodTypeAny, {
-    content: string | null;
     role: "assistant";
+    content: string | null;
     function_call?: {
         name: string;
         arguments: string;
@@ -491,8 +491,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         id: string;
     }[] | undefined;
 }, {
-    content: string | null;
     role: "assistant";
+    content: string | null;
     function_call?: {
         name: string;
         arguments: string;
@@ -511,8 +511,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     finish_reason: z.ZodNullable<z.ZodOptional<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
 }, "strip", z.ZodTypeAny, {
     message: {
-        content: string | null;
         role: "assistant";
+        content: string | null;
         function_call?: {
             name: string;
             arguments: string;
@@ -531,8 +531,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }, {
     message: {
-        content: string | null;
         role: "assistant";
+        content: string | null;
         function_call?: {
             name: string;
             arguments: string;
@@ -578,8 +578,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -610,8 +610,8 @@ declare const openAIChatResponseSchema: z.ZodObject<{
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
@@ -654,8 +654,8 @@ export declare const OpenAIChatResponseFormat: {
         id: string;
         choices: {
             message: {
-                content: string | null;
                 role: "assistant";
+                content: string | null;
                 function_call?: {
                     name: string;
                     arguments: string;
```
package/model-provider/openai/chat/OpenAIChatModel.js
CHANGED
```diff
@@ -10,7 +10,7 @@ import { PromptFormatTextStreamingModel } from "../../../model-function/generate
 import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
 import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
 import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
-import { mapChatPromptToOpenAIChatFormat, mapInstructionPromptToOpenAIChatFormat } from "./OpenAIChatPromptFormat.js";
+import { chat, instruction } from "./OpenAIChatPromptFormat.js";
 import { createOpenAIChatDeltaIterableQueue } from "./OpenAIChatStreamIterable.js";
 import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
 /*
@@ -394,13 +394,13 @@ export class OpenAIChatModel extends AbstractModel {
      * Returns this model with an instruction prompt format.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat(mapInstructionPromptToOpenAIChatFormat());
+        return this.withPromptFormat(instruction());
     }
     /**
      * Returns this model with a chat prompt format.
      */
     withChatPrompt() {
-        return this.withPromptFormat(mapChatPromptToOpenAIChatFormat());
+        return this.withPromptFormat(chat());
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextStreamingModel({
```
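As the second hunk shows, `withInstructionPrompt()` and `withChatPrompt()` are now thin wrappers around the renamed factories, so the two forms below are equivalent. A sketch (the model construction is assumed, not shown in the diff):

```ts
import { OpenAIChatModel, OpenAIChatPromptFormat } from "modelfusion";

const base = new OpenAIChatModel({ model: "gpt-3.5-turbo" });

// Shorthand on the model class:
const viaShorthand = base.withChatPrompt();
// Explicit equivalent, per the withChatPrompt() body above:
const viaFactory = base.withPromptFormat(OpenAIChatPromptFormat.chat());
```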
package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs
CHANGED
```diff
@@ -1,12 +1,12 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.mapChatPromptToOpenAIChatFormat = exports.mapInstructionPromptToOpenAIChatFormat = void 0;
+exports.chat = exports.instruction = void 0;
 const validateChatPrompt_js_1 = require("../../../model-function/generate-text/prompt-format/validateChatPrompt.cjs");
 const OpenAIChatMessage_js_1 = require("./OpenAIChatMessage.cjs");
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-function mapInstructionPromptToOpenAIChatFormat() {
+function instruction() {
     return {
         format: (instruction) => {
             const messages = [];
@@ -24,49 +24,37 @@ function mapInstructionPromptToOpenAIChatFormat() {
         stopSequences: [],
     };
 }
-exports.mapInstructionPromptToOpenAIChatFormat = mapInstructionPromptToOpenAIChatFormat;
+exports.instruction = instruction;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-function mapChatPromptToOpenAIChatFormat() {
+function chat() {
     return {
         format: (chatPrompt) => {
             (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
             const messages = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    role:
-
-                });
-                continue;
-            }
-            // ai message:
-            if ("ai" in message) {
-                messages.push({
-                    role: "assistant",
-                    content: message.ai,
-                });
-                continue;
+            if (chatPrompt.system != null) {
+                messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.system(chatPrompt.system));
+            }
+            for (const { role, content } of chatPrompt.messages) {
+                switch (role) {
+                    case "user": {
+                        messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.user(content));
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push(OpenAIChatMessage_js_1.OpenAIChatMessage.assistant(content));
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
                 }
-            // unsupported message:
-            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
             }
             return messages;
         },
         stopSequences: [],
     };
 }
-exports.mapChatPromptToOpenAIChatFormat = mapChatPromptToOpenAIChatFormat;
+exports.chat = chat;
```
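The rewritten `format()` also shows the new `ChatPrompt` shape (cf. the `ChatPrompt.d.ts` entry in the file list): an optional `system` string plus a `messages` array of `{ role, content }` pairs, replacing the old array of `{ user }` / `{ ai }` objects that the removed lines iterated over. A sketch of a prompt the new `chat()` format accepts, assuming `ChatPrompt` is re-exported from the package root:

```ts
import type { ChatPrompt } from "modelfusion";

const prompt: ChatPrompt = {
  system: "You are a helpful assistant.", // optional
  messages: [
    { role: "user", content: "Hi there!" },
    { role: "assistant", content: "Hello! How can I help?" },
    { role: "user", content: "Summarize this diff." },
  ],
};
```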
package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts
CHANGED
```diff
@@ -5,8 +5,8 @@ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-export declare function mapInstructionPromptToOpenAIChatFormat(): TextGenerationPromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
+export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export declare function mapChatPromptToOpenAIChatFormat(): TextGenerationPromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
+export declare function chat(): TextGenerationPromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
```
package/model-provider/openai/chat/OpenAIChatPromptFormat.js
CHANGED
```diff
@@ -3,7 +3,7 @@ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 /**
  * Formats an instruction prompt as an OpenAI chat prompt.
  */
-export function mapInstructionPromptToOpenAIChatFormat() {
+export function instruction() {
     return {
         format: (instruction) => {
             const messages = [];
@@ -24,41 +24,29 @@ export function mapInstructionPromptToOpenAIChatFormat() {
 /**
  * Formats a chat prompt as an OpenAI chat prompt.
  */
-export function mapChatPromptToOpenAIChatFormat() {
+export function chat() {
     return {
         format: (chatPrompt) => {
             validateChatPrompt(chatPrompt);
             const messages = [];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    role:
-
-                });
-                continue;
-            }
-            // ai message:
-            if ("ai" in message) {
-                messages.push({
-                    role: "assistant",
-                    content: message.ai,
-                });
-                continue;
+            if (chatPrompt.system != null) {
+                messages.push(OpenAIChatMessage.system(chatPrompt.system));
+            }
+            for (const { role, content } of chatPrompt.messages) {
+                switch (role) {
+                    case "user": {
+                        messages.push(OpenAIChatMessage.user(content));
+                        break;
+                    }
+                    case "assistant": {
+                        messages.push(OpenAIChatMessage.assistant(content));
+                        break;
+                    }
+                    default: {
+                        const _exhaustiveCheck = role;
+                        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+                    }
                 }
-            // unsupported message:
-            throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
             }
             return messages;
         },
```
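Both rewritten formatters end their `switch` with an `_exhaustiveCheck` binding before throwing. In the TypeScript source this is the standard `never`-based exhaustiveness check: once every member of the role union is handled, `role` narrows to `never`, so adding a new role without a matching `case` becomes a compile-time error. A generic sketch of the idiom:

```ts
type Role = "user" | "assistant";

function toLabel(role: Role): string {
  switch (role) {
    case "user":
      return "User";
    case "assistant":
      return "Assistant";
    default: {
      // `role` has type `never` here; this assignment stops compiling
      // if Role gains a member without a matching case above.
      const _exhaustiveCheck: never = role;
      throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
    }
  }
}
```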
package/model-provider/openai/index.cjs
CHANGED
```diff
@@ -10,11 +10,23 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.OpenAIError = void 0;
+exports.OpenAIChatPromptFormat = exports.OpenAIError = void 0;
 __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAICompletionModel.cjs"), exports);
@@ -28,5 +40,5 @@ __exportStar(require("./OpenAITranscriptionModel.cjs"), exports);
 __exportStar(require("./TikTokenTokenizer.cjs"), exports);
 __exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
 __exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
-__exportStar(require("./chat/OpenAIChatPromptFormat.cjs"), exports);
+exports.OpenAIChatPromptFormat = __importStar(require("./chat/OpenAIChatPromptFormat.cjs"));
 __exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
```
package/model-provider/openai/index.d.ts
CHANGED
```diff
@@ -10,6 +10,6 @@ export * from "./OpenAITranscriptionModel.js";
 export * from "./TikTokenTokenizer.js";
 export * from "./chat/OpenAIChatMessage.js";
 export * from "./chat/OpenAIChatModel.js";
-export * from "./chat/OpenAIChatPromptFormat.js";
+export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
 export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
 export * from "./chat/countOpenAIChatMessageTokens.js";
```
package/model-provider/openai/index.js
CHANGED
```diff
@@ -10,5 +10,5 @@ export * from "./OpenAITranscriptionModel.js";
 export * from "./TikTokenTokenizer.js";
 export * from "./chat/OpenAIChatMessage.js";
 export * from "./chat/OpenAIChatModel.js";
-export * from "./chat/OpenAIChatPromptFormat.js";
+export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
 export * from "./chat/countOpenAIChatMessageTokens.js";
```
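All three index files switch from a flat `export *` to `export * as OpenAIChatPromptFormat`, which is what pulls the `__importStar`/`__setModuleDefault` helpers into the CJS build above. From CommonJS the namespace arrives as a plain object; a consumption sketch (that the package root re-exports this provider index is an assumption):

```ts
// CommonJS consumer (Node); assumes "modelfusion" re-exports the openai index.
const { OpenAIChatPromptFormat } = require("modelfusion");

const chatFormat = OpenAIChatPromptFormat.chat();
const instructionFormat = OpenAIChatPromptFormat.instruction();
```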
package/package.json
CHANGED