modelfusion 0.78.0 → 0.80.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/README.md +4 -3
  2. package/model-function/generate-text/PromptFormatTextGenerationModel.cjs +14 -0
  3. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +4 -0
  4. package/model-function/generate-text/PromptFormatTextGenerationModel.js +14 -0
  5. package/model-function/generate-text/prompt-format/AlpacaPromptFormat.cjs +10 -10
  6. package/model-function/generate-text/prompt-format/AlpacaPromptFormat.d.ts +2 -2
  7. package/model-function/generate-text/prompt-format/AlpacaPromptFormat.js +10 -10
  8. package/model-function/generate-text/prompt-format/ChatMLPromptFormat.cjs +15 -10
  9. package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +3 -3
  10. package/model-function/generate-text/prompt-format/ChatMLPromptFormat.js +15 -10
  11. package/model-function/generate-text/prompt-format/ChatPrompt.cjs +24 -0
  12. package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +6 -0
  13. package/model-function/generate-text/prompt-format/ChatPrompt.js +22 -1
  14. package/model-function/generate-text/prompt-format/Content.cjs +2 -0
  15. package/model-function/generate-text/prompt-format/Content.d.ts +20 -0
  16. package/model-function/generate-text/prompt-format/Content.js +1 -0
  17. package/model-function/generate-text/prompt-format/InstructionPrompt.d.ts +21 -16
  18. package/model-function/generate-text/prompt-format/InvalidPromptError.cjs +28 -0
  19. package/model-function/generate-text/prompt-format/InvalidPromptError.d.ts +13 -0
  20. package/model-function/generate-text/prompt-format/InvalidPromptError.js +24 -0
  21. package/model-function/generate-text/prompt-format/Llama2PromptFormat.cjs +14 -10
  22. package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +3 -3
  23. package/model-function/generate-text/prompt-format/Llama2PromptFormat.js +14 -10
  24. package/model-function/generate-text/prompt-format/TextPromptFormat.cjs +11 -11
  25. package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +3 -3
  26. package/model-function/generate-text/prompt-format/TextPromptFormat.js +11 -11
  27. package/model-function/generate-text/prompt-format/VicunaPromptFormat.cjs +6 -6
  28. package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +1 -1
  29. package/model-function/generate-text/prompt-format/VicunaPromptFormat.js +6 -6
  30. package/model-function/generate-text/prompt-format/index.cjs +2 -2
  31. package/model-function/generate-text/prompt-format/index.d.ts +2 -2
  32. package/model-function/generate-text/prompt-format/index.js +2 -2
  33. package/model-function/generate-text/prompt-format/trimChatPrompt.cjs +2 -2
  34. package/model-function/generate-text/prompt-format/trimChatPrompt.d.ts +1 -1
  35. package/model-function/generate-text/prompt-format/trimChatPrompt.js +1 -1
  36. package/model-provider/anthropic/AnthropicPromptFormat.cjs +10 -10
  37. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +3 -3
  38. package/model-provider/anthropic/AnthropicPromptFormat.js +10 -10
  39. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +1 -1
  40. package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
  41. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.cjs +20 -11
  42. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.js +20 -11
  43. package/model-provider/ollama/OllamaTextGenerationModel.cjs +7 -0
  44. package/model-provider/ollama/OllamaTextGenerationModel.d.ts +2 -0
  45. package/model-provider/ollama/OllamaTextGenerationModel.js +7 -0
  46. package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
  47. package/model-provider/openai/chat/OpenAIChatMessage.cjs +19 -14
  48. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +2 -5
  49. package/model-provider/openai/chat/OpenAIChatMessage.js +19 -14
  50. package/model-provider/openai/chat/OpenAIChatModel.d.ts +1 -1
  51. package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +11 -13
  52. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +2 -2
  53. package/model-provider/openai/chat/OpenAIChatPromptFormat.js +11 -13
  54. package/package.json +1 -1
  55. package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
  56. package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
  57. package/tool/{ToolCallParseError.cjs → generate-tool-call/ToolCallParseError.cjs} +1 -1
  58. package/tool/{ToolCallParseError.js → generate-tool-call/ToolCallParseError.js} +1 -1
  59. package/tool/generate-tool-call/index.cjs +1 -0
  60. package/tool/generate-tool-call/index.d.ts +1 -0
  61. package/tool/generate-tool-call/index.js +1 -0
  62. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +60 -0
  63. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +41 -0
  64. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +56 -0
  65. package/tool/generate-tool-calls-or-text/ToolCallsOrTextParseError.cjs +36 -0
  66. package/tool/generate-tool-calls-or-text/ToolCallsOrTextParseError.d.ts +15 -0
  67. package/tool/generate-tool-calls-or-text/ToolCallsOrTextParseError.js +32 -0
  68. package/tool/generate-tool-calls-or-text/index.cjs +2 -0
  69. package/tool/generate-tool-calls-or-text/index.d.ts +2 -0
  70. package/tool/generate-tool-calls-or-text/index.js +2 -0
  71. package/model-function/generate-text/prompt-format/ChatPromptValidationError.cjs +0 -17
  72. package/model-function/generate-text/prompt-format/ChatPromptValidationError.d.ts +0 -8
  73. package/model-function/generate-text/prompt-format/ChatPromptValidationError.js +0 -13
  74. package/model-function/generate-text/prompt-format/validateChatPrompt.cjs +0 -24
  75. package/model-function/generate-text/prompt-format/validateChatPrompt.d.ts +0 -5
  76. package/model-function/generate-text/prompt-format/validateChatPrompt.js +0 -20
  77. /package/tool/{ToolCallParseError.d.ts → generate-tool-call/ToolCallParseError.d.ts} +0 -0
package/README.md CHANGED
@@ -87,9 +87,10 @@ import { streamText, openai } from "modelfusion";
87
87
  const textStream = await streamText(
88
88
  openai.ChatTextGenerator({ model: "gpt-4-vision-preview" }),
89
89
  [
90
- OpenAIChatMessage.user("Describe the image in detail:", {
91
- image: { base64Content: image, mimeType: "image/png" },
92
- }),
90
+ OpenAIChatMessage.user([
91
+ { type: "text", text: "Describe the image in detail:" },
92
+ { type: "image", base64Image: image, mimeType: "image/png" },
93
+ ]),
93
94
  ]
94
95
  );
95
96
  ```
@@ -1,6 +1,8 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.PromptFormatTextGenerationModel = void 0;
4
+ const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
5
+ const TextGenerationToolCallsOrGenerateTextModel_js_1 = require("../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs");
4
6
  class PromptFormatTextGenerationModel {
5
7
  constructor({ model, promptFormat, }) {
6
8
  Object.defineProperty(this, "model", {
@@ -44,6 +46,18 @@ class PromptFormatTextGenerationModel {
44
46
  get settingsForEvent() {
45
47
  return this.model.settingsForEvent;
46
48
  }
49
+ asToolCallGenerationModel(promptFormat) {
50
+ return new TextGenerationToolCallModel_js_1.TextGenerationToolCallModel({
51
+ model: this,
52
+ format: promptFormat,
53
+ });
54
+ }
55
+ asToolCallsOrTextGenerationModel(promptFormat) {
56
+ return new TextGenerationToolCallsOrGenerateTextModel_js_1.TextGenerationToolCallsOrGenerateTextModel({
57
+ model: this,
58
+ format: promptFormat,
59
+ });
60
+ }
47
61
  withPromptFormat(promptFormat) {
48
62
  return new PromptFormatTextGenerationModel({
49
63
  model: this.withSettings({
@@ -1,4 +1,6 @@
1
1
  import { FunctionOptions } from "../../core/FunctionOptions.js";
2
+ import { TextGenerationToolCallModel, ToolCallPromptFormat } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
3
+ import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptFormat } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
2
4
  import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";
3
5
  import { TextGenerationPromptFormat } from "./TextGenerationPromptFormat.js";
4
6
  export declare class PromptFormatTextGenerationModel<PROMPT, MODEL_PROMPT, SETTINGS extends TextGenerationModelSettings, MODEL extends TextGenerationModel<MODEL_PROMPT, SETTINGS>> implements TextGenerationModel<PROMPT, SETTINGS> {
@@ -23,6 +25,8 @@ export declare class PromptFormatTextGenerationModel<PROMPT, MODEL_PROMPT, SETTI
23
25
  } | undefined;
24
26
  }>;
25
27
  get settingsForEvent(): Partial<SETTINGS>;
28
+ asToolCallGenerationModel<INPUT_PROMPT>(promptFormat: ToolCallPromptFormat<INPUT_PROMPT, PROMPT>): TextGenerationToolCallModel<INPUT_PROMPT, PROMPT, this>;
29
+ asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptFormat: ToolCallsOrGenerateTextPromptFormat<INPUT_PROMPT, PROMPT>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, PROMPT, this>;
26
30
  withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, PROMPT>): PromptFormatTextGenerationModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
27
31
  withSettings(additionalSettings: Partial<SETTINGS>): this;
28
32
  }
@@ -1,3 +1,5 @@
1
+ import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
2
+ import { TextGenerationToolCallsOrGenerateTextModel, } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
1
3
  export class PromptFormatTextGenerationModel {
2
4
  constructor({ model, promptFormat, }) {
3
5
  Object.defineProperty(this, "model", {
@@ -41,6 +43,18 @@ export class PromptFormatTextGenerationModel {
41
43
  get settingsForEvent() {
42
44
  return this.model.settingsForEvent;
43
45
  }
46
+ asToolCallGenerationModel(promptFormat) {
47
+ return new TextGenerationToolCallModel({
48
+ model: this,
49
+ format: promptFormat,
50
+ });
51
+ }
52
+ asToolCallsOrTextGenerationModel(promptFormat) {
53
+ return new TextGenerationToolCallsOrGenerateTextModel({
54
+ model: this,
55
+ format: promptFormat,
56
+ });
57
+ }
44
58
  withPromptFormat(promptFormat) {
45
59
  return new PromptFormatTextGenerationModel({
46
60
  model: this.withSettings({
@@ -9,10 +9,10 @@ const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a
9
9
  function text() {
10
10
  return {
11
11
  stopSequences: [],
12
- format: (instruction) => {
12
+ format(prompt) {
13
13
  let text = DEFAULT_SYSTEM_PROMPT_NO_INPUT;
14
14
  text += "\n\n### Instruction:\n";
15
- text += instruction;
15
+ text += prompt;
16
16
  text += "\n\n### Response:\n";
17
17
  return text;
18
18
  },
@@ -58,18 +58,18 @@ exports.text = text;
58
58
  function instruction() {
59
59
  return {
60
60
  stopSequences: [],
61
- format: (instruction) => {
62
- let text = instruction.system ??
63
- (instruction.input != null
61
+ format(prompt) {
62
+ let text = prompt.system ??
63
+ (prompt.input != null
64
64
  ? DEFAULT_SYSTEM_PROMPT_INPUT
65
65
  : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
66
66
  text += "\n\n### Instruction:\n";
67
- if (instruction.system != null) {
68
- text += `${instruction.system}\n`;
67
+ if (prompt.system != null) {
68
+ text += `${prompt.system}\n`;
69
69
  }
70
- text += instruction.instruction;
71
- if (instruction.input != null) {
72
- text += `\n\n### Input:\n${instruction.input}`;
70
+ text += prompt.instruction;
71
+ if (prompt.input != null) {
72
+ text += `\n\n### Input:\n${prompt.input}`;
73
73
  }
74
74
  text += "\n\n### Response:\n";
75
75
  return text;
@@ -1,5 +1,5 @@
1
- import { InstructionPrompt } from "./InstructionPrompt.js";
2
1
  import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
2
+ import { TextInstructionPrompt } from "./InstructionPrompt.js";
3
3
  /**
4
4
  * Formats a text prompt as an Alpaca prompt.
5
5
  */
@@ -40,7 +40,7 @@ export declare function text(): TextGenerationPromptFormat<string, string>;
40
40
  *
41
41
  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
42
42
  */
43
- export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt & {
43
+ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt & {
44
44
  input?: string;
45
45
  }, // optional input supported by Alpaca
46
46
  string>;
@@ -6,10 +6,10 @@ const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a
6
6
  export function text() {
7
7
  return {
8
8
  stopSequences: [],
9
- format: (instruction) => {
9
+ format(prompt) {
10
10
  let text = DEFAULT_SYSTEM_PROMPT_NO_INPUT;
11
11
  text += "\n\n### Instruction:\n";
12
- text += instruction;
12
+ text += prompt;
13
13
  text += "\n\n### Response:\n";
14
14
  return text;
15
15
  },
@@ -54,18 +54,18 @@ export function text() {
54
54
  export function instruction() {
55
55
  return {
56
56
  stopSequences: [],
57
- format: (instruction) => {
58
- let text = instruction.system ??
59
- (instruction.input != null
57
+ format(prompt) {
58
+ let text = prompt.system ??
59
+ (prompt.input != null
60
60
  ? DEFAULT_SYSTEM_PROMPT_INPUT
61
61
  : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
62
62
  text += "\n\n### Instruction:\n";
63
- if (instruction.system != null) {
64
- text += `${instruction.system}\n`;
63
+ if (prompt.system != null) {
64
+ text += `${prompt.system}\n`;
65
65
  }
66
- text += instruction.instruction;
67
- if (instruction.input != null) {
68
- text += `\n\n### Input:\n${instruction.input}`;
66
+ text += prompt.instruction;
67
+ if (prompt.input != null) {
68
+ text += `\n\n### Input:\n${prompt.input}`;
69
69
  }
70
70
  text += "\n\n### Response:\n";
71
71
  return text;
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.chat = exports.instruction = exports.text = void 0;
4
- const validateChatPrompt_js_1 = require("./validateChatPrompt.cjs");
4
+ const ChatPrompt_js_1 = require("./ChatPrompt.cjs");
5
5
  const START_SEGMENT = "<|im_start|>";
6
6
  const END_SEGMENT = "<|im_end|>";
7
7
  function chatMLStart(role) {
@@ -16,7 +16,10 @@ function chatMLSegment(role, text) {
16
16
  function text() {
17
17
  return {
18
18
  stopSequences: [END_SEGMENT],
19
- format: (instruction) => chatMLSegment("user", instruction),
19
+ format(prompt) {
20
+ // prompt and then prefix start of assistant response:
21
+ return chatMLSegment("user", prompt) + chatMLStart("assistant");
22
+ },
20
23
  };
21
24
  }
22
25
  exports.text = text;
@@ -36,8 +39,12 @@ exports.text = text;
36
39
  function instruction() {
37
40
  return {
38
41
  stopSequences: [END_SEGMENT],
39
- format: (instruction) => chatMLSegment("system", instruction.system) +
40
- chatMLSegment("user", instruction.instruction),
42
+ format(prompt) {
43
+ return (chatMLSegment("system", prompt.system) +
44
+ chatMLSegment("user", prompt.instruction) +
45
+ chatMLStart("assistant") // prefix start of assistant response
46
+ );
47
+ },
41
48
  };
42
49
  }
43
50
  exports.instruction = instruction;
@@ -56,12 +63,10 @@ exports.instruction = instruction;
56
63
  */
57
64
  function chat() {
58
65
  return {
59
- format: (chatPrompt) => {
60
- (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
61
- let text = chatPrompt.system != null
62
- ? chatMLSegment("system", chatPrompt.system)
63
- : "";
64
- for (const { role, content } of chatPrompt.messages) {
66
+ format(prompt) {
67
+ (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
68
+ let text = prompt.system != null ? chatMLSegment("system", prompt.system) : "";
69
+ for (const { role, content } of prompt.messages) {
65
70
  switch (role) {
66
71
  case "user": {
67
72
  text += chatMLSegment("user", content);
@@ -1,6 +1,6 @@
1
- import { ChatPrompt } from "./ChatPrompt.js";
2
- import { InstructionPrompt } from "./InstructionPrompt.js";
3
1
  import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
2
+ import { ChatPrompt } from "./ChatPrompt.js";
3
+ import { TextInstructionPrompt } from "./InstructionPrompt.js";
4
4
  /**
5
5
  * Formats a text prompt using the ChatML format.
6
6
  */
@@ -18,7 +18,7 @@ export declare function text(): TextGenerationPromptFormat<string, string>;
18
18
  * Paris<|im_end|>
19
19
  * ```
20
20
  */
21
- export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt, string>;
21
+ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
22
22
  /**
23
23
  * Formats a chat prompt using the ChatML format.
24
24
  *
@@ -1,4 +1,4 @@
1
- import { validateChatPrompt } from "./validateChatPrompt.js";
1
+ import { validateChatPrompt } from "./ChatPrompt.js";
2
2
  const START_SEGMENT = "<|im_start|>";
3
3
  const END_SEGMENT = "<|im_end|>";
4
4
  function chatMLStart(role) {
@@ -13,7 +13,10 @@ function chatMLSegment(role, text) {
13
13
  export function text() {
14
14
  return {
15
15
  stopSequences: [END_SEGMENT],
16
- format: (instruction) => chatMLSegment("user", instruction),
16
+ format(prompt) {
17
+ // prompt and then prefix start of assistant response:
18
+ return chatMLSegment("user", prompt) + chatMLStart("assistant");
19
+ },
17
20
  };
18
21
  }
19
22
  /**
@@ -32,8 +35,12 @@ export function text() {
32
35
  export function instruction() {
33
36
  return {
34
37
  stopSequences: [END_SEGMENT],
35
- format: (instruction) => chatMLSegment("system", instruction.system) +
36
- chatMLSegment("user", instruction.instruction),
38
+ format(prompt) {
39
+ return (chatMLSegment("system", prompt.system) +
40
+ chatMLSegment("user", prompt.instruction) +
41
+ chatMLStart("assistant") // prefix start of assistant response
42
+ );
43
+ },
37
44
  };
38
45
  }
39
46
  /**
@@ -51,12 +58,10 @@ export function instruction() {
51
58
  */
52
59
  export function chat() {
53
60
  return {
54
- format: (chatPrompt) => {
55
- validateChatPrompt(chatPrompt);
56
- let text = chatPrompt.system != null
57
- ? chatMLSegment("system", chatPrompt.system)
58
- : "";
59
- for (const { role, content } of chatPrompt.messages) {
61
+ format(prompt) {
62
+ validateChatPrompt(prompt);
63
+ let text = prompt.system != null ? chatMLSegment("system", prompt.system) : "";
64
+ for (const { role, content } of prompt.messages) {
60
65
  switch (role) {
61
66
  case "user": {
62
67
  text += chatMLSegment("user", content);
@@ -1,2 +1,26 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.validateChatPrompt = void 0;
4
+ const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
5
+ /**
6
+ * Checks if a chat prompt is valid. Throws an {@link InvalidPromptError} if it's not.
7
+ *
8
+ * @throws {@link InvalidPromptError}
9
+ */
10
+ function validateChatPrompt(chatPrompt) {
11
+ const messages = chatPrompt.messages;
12
+ if (messages.length < 1) {
13
+ throw new InvalidPromptError_js_1.InvalidPromptError("ChatPrompt should have at least one message.", chatPrompt);
14
+ }
15
+ for (let i = 0; i < messages.length; i++) {
16
+ const expectedRole = i % 2 === 0 ? "user" : "assistant";
17
+ const role = messages[i].role;
18
+ if (role !== expectedRole) {
19
+ throw new InvalidPromptError_js_1.InvalidPromptError(`Message at index ${i} should have role '${expectedRole}', but has role '${role}'.`, chatPrompt);
20
+ }
21
+ }
22
+ if (messages.length % 2 === 0) {
23
+ throw new InvalidPromptError_js_1.InvalidPromptError("The last message must be a user message.", chatPrompt);
24
+ }
25
+ }
26
+ exports.validateChatPrompt = validateChatPrompt;
@@ -36,3 +36,9 @@ export type ChatMessage = {
36
36
  role: "user" | "assistant";
37
37
  content: string;
38
38
  };
39
+ /**
40
+ * Checks if a chat prompt is valid. Throws an {@link InvalidPromptError} if it's not.
41
+ *
42
+ * @throws {@link InvalidPromptError}
43
+ */
44
+ export declare function validateChatPrompt(chatPrompt: ChatPrompt): void;
@@ -1 +1,22 @@
1
- export {};
1
+ import { InvalidPromptError } from "./InvalidPromptError.js";
2
+ /**
3
+ * Checks if a chat prompt is valid. Throws an {@link InvalidPromptError} if it's not.
4
+ *
5
+ * @throws {@link InvalidPromptError}
6
+ */
7
+ export function validateChatPrompt(chatPrompt) {
8
+ const messages = chatPrompt.messages;
9
+ if (messages.length < 1) {
10
+ throw new InvalidPromptError("ChatPrompt should have at least one message.", chatPrompt);
11
+ }
12
+ for (let i = 0; i < messages.length; i++) {
13
+ const expectedRole = i % 2 === 0 ? "user" : "assistant";
14
+ const role = messages[i].role;
15
+ if (role !== expectedRole) {
16
+ throw new InvalidPromptError(`Message at index ${i} should have role '${expectedRole}', but has role '${role}'.`, chatPrompt);
17
+ }
18
+ }
19
+ if (messages.length % 2 === 0) {
20
+ throw new InvalidPromptError("The last message must be a user message.", chatPrompt);
21
+ }
22
+ }
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,20 @@
1
+ export type MultiModalInput = Array<Content>;
2
+ export type Content = TextContent | ImageContent;
3
+ export type TextContent = {
4
+ type: "text";
5
+ /**
6
+ * The text content.
7
+ */
8
+ text: string;
9
+ };
10
+ export type ImageContent = {
11
+ type: "image";
12
+ /**
13
+ * Base-64 encoded image.
14
+ */
15
+ base64Image: string;
16
+ /**
17
+ * Optional mime type of the image.
18
+ */
19
+ mimeType?: string;
20
+ };
@@ -1,5 +1,23 @@
1
+ import { MultiModalInput } from "./Content.js";
1
2
  /**
2
- * A single instruction prompt. It can contain an optional system message to define the role and behavior of the language model.
3
+ * A single multi-modal instruction prompt. It can contain an optional system message to define
4
+ * the role and behavior of the language model.
5
+ * The instruction is a multi-modal input (`array` of content).
6
+ */
7
+ export type InstructionPrompt = {
8
+ /**
9
+ * Optional system message to provide context for the language model. Note that for some models,
10
+ * changing the system message can impact the results, because the model may be trained on the default system message.
11
+ */
12
+ system?: string;
13
+ /**
14
+ * The multi-modal instruction for the model.
15
+ */
16
+ instruction: MultiModalInput;
17
+ };
18
+ /**
19
+ * A single text instruction prompt. It can contain an optional system message to define
20
+ * the role and behavior of the language model.
3
21
  *
4
22
  * @example
5
23
  * ```ts
@@ -9,27 +27,14 @@
9
27
  * }
10
28
  * ```
11
29
  */
12
- export type InstructionPrompt = {
30
+ export type TextInstructionPrompt = {
13
31
  /**
14
32
  * Optional system message to provide context for the language model. Note that for some models,
15
33
  * changing the system message can impact the results, because the model may be trained on the default system message.
16
34
  */
17
35
  system?: string;
18
36
  /**
19
- * The instruction for the model.
37
+ * The text instruction for the model.
20
38
  */
21
39
  instruction: string;
22
- /**
23
- * Optional image to provide context for the language model. Only supported by some models.
24
- */
25
- image?: {
26
- /**
27
- * Base-64 encoded image.
28
- */
29
- base64Content: string;
30
- /**
31
- * Optional mime type of the image.
32
- */
33
- mimeType?: string;
34
- };
35
40
  };
@@ -0,0 +1,28 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.InvalidPromptError = void 0;
4
+ /**
5
+ * Error thrown when a prompt validation fails.
6
+ */
7
+ class InvalidPromptError extends Error {
8
+ constructor(message, prompt) {
9
+ super(message);
10
+ Object.defineProperty(this, "prompt", {
11
+ enumerable: true,
12
+ configurable: true,
13
+ writable: true,
14
+ value: void 0
15
+ });
16
+ this.name = "InvalidPromptError";
17
+ this.prompt = prompt;
18
+ }
19
+ toJSON() {
20
+ return {
21
+ name: this.name,
22
+ message: this.message,
23
+ stack: this.stack,
24
+ prompt: this.prompt,
25
+ };
26
+ }
27
+ }
28
+ exports.InvalidPromptError = InvalidPromptError;
@@ -0,0 +1,13 @@
1
+ /**
2
+ * Error thrown when a prompt validation fails.
3
+ */
4
+ export declare class InvalidPromptError extends Error {
5
+ readonly prompt: unknown;
6
+ constructor(message: string, prompt: unknown);
7
+ toJSON(): {
8
+ name: string;
9
+ message: string;
10
+ stack: string | undefined;
11
+ prompt: unknown;
12
+ };
13
+ }
@@ -0,0 +1,24 @@
1
+ /**
2
+ * Error thrown when a prompt validation fails.
3
+ */
4
+ export class InvalidPromptError extends Error {
5
+ constructor(message, prompt) {
6
+ super(message);
7
+ Object.defineProperty(this, "prompt", {
8
+ enumerable: true,
9
+ configurable: true,
10
+ writable: true,
11
+ value: void 0
12
+ });
13
+ this.name = "InvalidPromptError";
14
+ this.prompt = prompt;
15
+ }
16
+ toJSON() {
17
+ return {
18
+ name: this.name,
19
+ message: this.message,
20
+ stack: this.stack,
21
+ prompt: this.prompt,
22
+ };
23
+ }
24
+ }
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.chat = exports.instruction = exports.text = void 0;
4
- const validateChatPrompt_js_1 = require("./validateChatPrompt.cjs");
4
+ const ChatPrompt_js_1 = require("./ChatPrompt.cjs");
5
5
  // see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
6
6
  const BEGIN_SEGMENT = "<s>";
7
7
  const END_SEGMENT = "</s>";
@@ -22,7 +22,9 @@ const END_SYSTEM = "\n<</SYS>>\n\n";
22
22
  function text() {
23
23
  return {
24
24
  stopSequences: [END_SEGMENT],
25
- format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction}${END_INSTRUCTION}\n`,
25
+ format(prompt) {
26
+ return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}\n`;
27
+ },
26
28
  };
27
29
  }
28
30
  exports.text = text;
@@ -43,9 +45,11 @@ exports.text = text;
43
45
  function instruction() {
44
46
  return {
45
47
  stopSequences: [END_SEGMENT],
46
- format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
47
- ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
48
- : ""}${instruction.instruction}${END_INSTRUCTION}\n`,
48
+ format(prompt) {
49
+ return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
50
+ ? ` ${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
51
+ : ""}${prompt.instruction}${END_INSTRUCTION}\n`;
52
+ },
49
53
  };
50
54
  }
51
55
  exports.instruction = instruction;
@@ -63,14 +67,14 @@ exports.instruction = instruction;
63
67
  */
64
68
  function chat() {
65
69
  return {
66
- format: (chatPrompt) => {
67
- (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
68
- let text = chatPrompt.system != null
70
+ format(prompt) {
71
+ (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
72
+ let text = prompt.system != null
69
73
  ? // Separate section for system message to simplify implementation
70
74
  // (this is slightly different from the original instructions):
71
- `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${chatPrompt.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`
75
+ `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`
72
76
  : "";
73
- for (const { role, content } of chatPrompt.messages) {
77
+ for (const { role, content } of prompt.messages) {
74
78
  switch (role) {
75
79
  case "user": {
76
80
  text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${content}${END_INSTRUCTION}`;
@@ -1,6 +1,6 @@
1
- import { ChatPrompt } from "./ChatPrompt.js";
2
- import { InstructionPrompt } from "./InstructionPrompt.js";
3
1
  import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
2
+ import { ChatPrompt } from "./ChatPrompt.js";
3
+ import { TextInstructionPrompt } from "./InstructionPrompt.js";
4
4
  /**
5
5
  * Formats a text prompt as a Llama 2 prompt.
6
6
  *
@@ -26,7 +26,7 @@ export declare function text(): TextGenerationPromptFormat<string, string>;
26
26
  *
27
27
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
28
28
  */
29
- export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt, string>;
29
+ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
30
30
  /**
31
31
  * Formats a chat prompt as a Llama 2 prompt.
32
32
  *
@@ -1,4 +1,4 @@
1
- import { validateChatPrompt } from "./validateChatPrompt.js";
1
+ import { validateChatPrompt } from "./ChatPrompt.js";
2
2
  // see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
3
3
  const BEGIN_SEGMENT = "<s>";
4
4
  const END_SEGMENT = "</s>";
@@ -19,7 +19,9 @@ const END_SYSTEM = "\n<</SYS>>\n\n";
19
19
  export function text() {
20
20
  return {
21
21
  stopSequences: [END_SEGMENT],
22
- format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction}${END_INSTRUCTION}\n`,
22
+ format(prompt) {
23
+ return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}\n`;
24
+ },
23
25
  };
24
26
  }
25
27
  /**
@@ -39,9 +41,11 @@ export function text() {
39
41
  export function instruction() {
40
42
  return {
41
43
  stopSequences: [END_SEGMENT],
42
- format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
43
- ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
44
- : ""}${instruction.instruction}${END_INSTRUCTION}\n`,
44
+ format(prompt) {
45
+ return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
46
+ ? ` ${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
47
+ : ""}${prompt.instruction}${END_INSTRUCTION}\n`;
48
+ },
45
49
  };
46
50
  }
47
51
  /**
@@ -58,14 +62,14 @@ export function instruction() {
58
62
  */
59
63
  export function chat() {
60
64
  return {
61
- format: (chatPrompt) => {
62
- validateChatPrompt(chatPrompt);
63
- let text = chatPrompt.system != null
65
+ format(prompt) {
66
+ validateChatPrompt(prompt);
67
+ let text = prompt.system != null
64
68
  ? // Separate section for system message to simplify implementation
65
69
  // (this is slightly different from the original instructions):
66
- `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${chatPrompt.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`
70
+ `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`
67
71
  : "";
68
- for (const { role, content } of chatPrompt.messages) {
72
+ for (const { role, content } of prompt.messages) {
69
73
  switch (role) {
70
74
  case "user": {
71
75
  text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${content}${END_INSTRUCTION}`;