modelfusion 0.30.1 → 0.32.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. package/README.md +14 -20
  2. package/model-function/Model.d.ts +1 -1
  3. package/model-function/SuccessfulModelCall.cjs +2 -9
  4. package/model-function/SuccessfulModelCall.d.ts +10 -7
  5. package/model-function/SuccessfulModelCall.js +2 -9
  6. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +2 -2
  7. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +5 -9
  8. package/model-function/generate-structure/StructureFromTextGenerationModel.js +2 -2
  9. package/model-function/generate-structure/StructureGenerationModel.d.ts +3 -2
  10. package/model-function/generate-structure/StructureOrTextGenerationModel.d.ts +4 -5
  11. package/model-function/generate-structure/generateStructure.cjs +5 -2
  12. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  13. package/model-function/generate-structure/generateStructure.js +5 -2
  14. package/model-function/generate-structure/generateStructureOrText.cjs +8 -5
  15. package/model-function/generate-structure/generateStructureOrText.d.ts +2 -2
  16. package/model-function/generate-structure/generateStructureOrText.js +8 -5
  17. package/model-function/index.cjs +2 -3
  18. package/model-function/index.d.ts +2 -3
  19. package/model-function/index.js +2 -3
  20. package/model-provider/openai/OpenAICostCalculator.cjs +6 -5
  21. package/model-provider/openai/OpenAICostCalculator.js +6 -5
  22. package/model-provider/openai/OpenAITextGenerationModel.cjs +41 -17
  23. package/model-provider/openai/OpenAITextGenerationModel.d.ts +32 -14
  24. package/model-provider/openai/OpenAITextGenerationModel.js +41 -17
  25. package/model-provider/openai/TikTokenTokenizer.cjs +3 -2
  26. package/model-provider/openai/TikTokenTokenizer.js +3 -2
  27. package/model-provider/openai/chat/OpenAIChatModel.cjs +47 -9
  28. package/model-provider/openai/chat/OpenAIChatModel.d.ts +15 -5
  29. package/model-provider/openai/chat/OpenAIChatModel.js +47 -9
  30. package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +78 -0
  31. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +12 -0
  32. package/model-provider/openai/chat/OpenAIChatPromptFormat.js +73 -0
  33. package/model-provider/openai/index.cjs +2 -3
  34. package/model-provider/openai/index.d.ts +1 -1
  35. package/model-provider/openai/index.js +1 -1
  36. package/model-provider/stability/StabilityImageGenerationModel.cjs +6 -0
  37. package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -2
  38. package/model-provider/stability/StabilityImageGenerationModel.js +6 -0
  39. package/package.json +2 -2
  40. package/prompt/AlpacaPromptFormat.cjs +23 -21
  41. package/prompt/AlpacaPromptFormat.d.ts +1 -1
  42. package/prompt/AlpacaPromptFormat.js +21 -19
  43. package/prompt/InstructionPrompt.d.ts +9 -0
  44. package/prompt/Llama2PromptFormat.cjs +44 -40
  45. package/prompt/Llama2PromptFormat.d.ts +2 -2
  46. package/prompt/Llama2PromptFormat.js +41 -37
  47. package/prompt/TextPromptFormat.cjs +5 -5
  48. package/prompt/TextPromptFormat.d.ts +2 -2
  49. package/prompt/TextPromptFormat.js +2 -2
  50. package/prompt/VicunaPromptFormat.cjs +39 -37
  51. package/prompt/VicunaPromptFormat.d.ts +1 -1
  52. package/prompt/VicunaPromptFormat.js +37 -35
  53. package/prompt/index.cjs +0 -1
  54. package/prompt/index.d.ts +0 -1
  55. package/prompt/index.js +0 -1
  56. package/tool/useTool.cjs +5 -1
  57. package/tool/useTool.d.ts +1 -1
  58. package/tool/useTool.js +5 -1
  59. package/tool/useToolOrGenerateText.cjs +5 -2
  60. package/tool/useToolOrGenerateText.d.ts +2 -2
  61. package/tool/useToolOrGenerateText.js +5 -2
  62. package/model-function/generate-structure/InstructionWithStructurePrompt.cjs +0 -17
  63. package/model-function/generate-structure/InstructionWithStructurePrompt.d.ts +0 -17
  64. package/model-function/generate-structure/InstructionWithStructurePrompt.js +0 -14
  65. package/model-provider/openai/chat/OpenAIChatPrompt.cjs +0 -135
  66. package/model-provider/openai/chat/OpenAIChatPrompt.d.ts +0 -96
  67. package/model-provider/openai/chat/OpenAIChatPrompt.js +0 -127
  68. package/prompt/OpenAIChatPromptFormat.cjs +0 -74
  69. package/prompt/OpenAIChatPromptFormat.d.ts +0 -12
  70. package/prompt/OpenAIChatPromptFormat.js +0 -69
@@ -0,0 +1,78 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.mapChatPromptToOpenAIChatFormat = exports.mapInstructionPromptToOpenAIChatFormat = void 0;
4
+ const validateChatPrompt_js_1 = require("../../../prompt/chat/validateChatPrompt.cjs");
5
+ /**
6
+ * Formats an instruction prompt as an OpenAI chat prompt.
7
+ */
8
+ function mapInstructionPromptToOpenAIChatFormat() {
9
+ return {
10
+ format: (instruction) => {
11
+ const messages = [];
12
+ if (instruction.system != null) {
13
+ messages.push({
14
+ role: "system",
15
+ content: instruction.system,
16
+ });
17
+ }
18
+ messages.push({
19
+ role: "user",
20
+ content: instruction.instruction,
21
+ });
22
+ if (instruction.input != null) {
23
+ messages.push({
24
+ role: "user",
25
+ content: instruction.input,
26
+ });
27
+ }
28
+ return messages;
29
+ },
30
+ stopSequences: [],
31
+ };
32
+ }
33
+ exports.mapInstructionPromptToOpenAIChatFormat = mapInstructionPromptToOpenAIChatFormat;
34
+ /**
35
+ * Formats a chat prompt as an OpenAI chat prompt.
36
+ */
37
+ function mapChatPromptToOpenAIChatFormat() {
38
+ return {
39
+ format: (chatPrompt) => {
40
+ (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
41
+ const messages = [];
42
+ for (let i = 0; i < chatPrompt.length; i++) {
43
+ const message = chatPrompt[i];
44
+ // system message:
45
+ if (i === 0 &&
46
+ "system" in message &&
47
+ typeof message.system === "string") {
48
+ messages.push({
49
+ role: "system",
50
+ content: message.system,
51
+ });
52
+ continue;
53
+ }
54
+ // user message
55
+ if ("user" in message) {
56
+ messages.push({
57
+ role: "user",
58
+ content: message.user,
59
+ });
60
+ continue;
61
+ }
62
+ // ai message:
63
+ if ("ai" in message) {
64
+ messages.push({
65
+ role: "assistant",
66
+ content: message.ai,
67
+ });
68
+ continue;
69
+ }
70
+ // unsupported message:
71
+ throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
72
+ }
73
+ return messages;
74
+ },
75
+ stopSequences: [],
76
+ };
77
+ }
78
+ exports.mapChatPromptToOpenAIChatFormat = mapChatPromptToOpenAIChatFormat;
@@ -0,0 +1,12 @@
1
+ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
2
+ import { ChatPrompt } from "../../../prompt/chat/ChatPrompt.js";
3
+ import { InstructionPrompt } from "../../../prompt/InstructionPrompt.js";
4
+ import { PromptFormat } from "../../../prompt/PromptFormat.js";
5
+ /**
6
+ * Formats an instruction prompt as an OpenAI chat prompt.
7
+ */
8
+ export declare function mapInstructionPromptToOpenAIChatFormat(): PromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
9
+ /**
10
+ * Formats a chat prompt as an OpenAI chat prompt.
11
+ */
12
+ export declare function mapChatPromptToOpenAIChatFormat(): PromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
@@ -0,0 +1,73 @@
1
+ import { validateChatPrompt } from "../../../prompt/chat/validateChatPrompt.js";
2
+ /**
3
+ * Formats an instruction prompt as an OpenAI chat prompt.
4
+ */
5
+ export function mapInstructionPromptToOpenAIChatFormat() {
6
+ return {
7
+ format: (instruction) => {
8
+ const messages = [];
9
+ if (instruction.system != null) {
10
+ messages.push({
11
+ role: "system",
12
+ content: instruction.system,
13
+ });
14
+ }
15
+ messages.push({
16
+ role: "user",
17
+ content: instruction.instruction,
18
+ });
19
+ if (instruction.input != null) {
20
+ messages.push({
21
+ role: "user",
22
+ content: instruction.input,
23
+ });
24
+ }
25
+ return messages;
26
+ },
27
+ stopSequences: [],
28
+ };
29
+ }
30
+ /**
31
+ * Formats a chat prompt as an OpenAI chat prompt.
32
+ */
33
+ export function mapChatPromptToOpenAIChatFormat() {
34
+ return {
35
+ format: (chatPrompt) => {
36
+ validateChatPrompt(chatPrompt);
37
+ const messages = [];
38
+ for (let i = 0; i < chatPrompt.length; i++) {
39
+ const message = chatPrompt[i];
40
+ // system message:
41
+ if (i === 0 &&
42
+ "system" in message &&
43
+ typeof message.system === "string") {
44
+ messages.push({
45
+ role: "system",
46
+ content: message.system,
47
+ });
48
+ continue;
49
+ }
50
+ // user message
51
+ if ("user" in message) {
52
+ messages.push({
53
+ role: "user",
54
+ content: message.user,
55
+ });
56
+ continue;
57
+ }
58
+ // ai message:
59
+ if ("ai" in message) {
60
+ messages.push({
61
+ role: "assistant",
62
+ content: message.ai,
63
+ });
64
+ continue;
65
+ }
66
+ // unsupported message:
67
+ throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
68
+ }
69
+ return messages;
70
+ },
71
+ stopSequences: [],
72
+ };
73
+ }
@@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
14
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
15
  };
16
16
  Object.defineProperty(exports, "__esModule", { value: true });
17
- exports.OpenAIChatFunctionPrompt = exports.OpenAIError = void 0;
17
+ exports.OpenAIError = void 0;
18
18
  __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
19
19
  __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
20
20
  __exportStar(require("./OpenAICostCalculator.cjs"), exports);
@@ -27,6 +27,5 @@ __exportStar(require("./OpenAITranscriptionModel.cjs"), exports);
27
27
  __exportStar(require("./TikTokenTokenizer.cjs"), exports);
28
28
  __exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
29
29
  __exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
30
- var OpenAIChatPrompt_js_1 = require("./chat/OpenAIChatPrompt.cjs");
31
- Object.defineProperty(exports, "OpenAIChatFunctionPrompt", { enumerable: true, get: function () { return OpenAIChatPrompt_js_1.OpenAIChatFunctionPrompt; } });
30
+ __exportStar(require("./chat/OpenAIChatPromptFormat.cjs"), exports);
32
31
  __exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
@@ -9,6 +9,6 @@ export * from "./OpenAITranscriptionModel.js";
9
9
  export * from "./TikTokenTokenizer.js";
10
10
  export * from "./chat/OpenAIChatMessage.js";
11
11
  export * from "./chat/OpenAIChatModel.js";
12
- export { OpenAIChatFunctionPrompt } from "./chat/OpenAIChatPrompt.js";
12
+ export * from "./chat/OpenAIChatPromptFormat.js";
13
13
  export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
14
14
  export * from "./chat/countOpenAIChatMessageTokens.js";
@@ -9,5 +9,5 @@ export * from "./OpenAITranscriptionModel.js";
9
9
  export * from "./TikTokenTokenizer.js";
10
10
  export * from "./chat/OpenAIChatMessage.js";
11
11
  export * from "./chat/OpenAIChatModel.js";
12
- export { OpenAIChatFunctionPrompt } from "./chat/OpenAIChatPrompt.js";
12
+ export * from "./chat/OpenAIChatPromptFormat.js";
13
13
  export * from "./chat/countOpenAIChatMessageTokens.js";
@@ -86,6 +86,12 @@ class StabilityImageGenerationModel extends AbstractModel_js_1.AbstractModel {
86
86
  }
87
87
  }
88
88
  exports.StabilityImageGenerationModel = StabilityImageGenerationModel;
89
+ const stabilityImageGenerationModels = [
90
+ "stable-diffusion-v1-5",
91
+ "stable-diffusion-512-v2-1",
92
+ "stable-diffusion-xl-1024-v0-9",
93
+ "stable-diffusion-xl-1024-v1-0",
94
+ ];
89
95
  const stabilityImageGenerationResponseSchema = zod_1.z.object({
90
96
  artifacts: zod_1.z.array(zod_1.z.object({
91
97
  base64: zod_1.z.string(),
@@ -28,7 +28,7 @@ import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-
28
28
  export declare class StabilityImageGenerationModel extends AbstractModel<StabilityImageGenerationModelSettings> implements ImageGenerationModel<StabilityImageGenerationPrompt, StabilityImageGenerationResponse, StabilityImageGenerationModelSettings> {
29
29
  constructor(settings: StabilityImageGenerationModelSettings);
30
30
  readonly provider: "stability";
31
- get modelName(): string;
31
+ get modelName(): StabilityImageGenerationModelType;
32
32
  callAPI(input: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
33
33
  get settingsForEvent(): Partial<StabilityImageGenerationModelSettings>;
34
34
  generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
@@ -41,9 +41,11 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
41
41
  extractBase64Image(response: StabilityImageGenerationResponse): string;
42
42
  withSettings(additionalSettings: StabilityImageGenerationModelSettings): this;
43
43
  }
44
+ declare const stabilityImageGenerationModels: readonly ["stable-diffusion-v1-5", "stable-diffusion-512-v2-1", "stable-diffusion-xl-1024-v0-9", "stable-diffusion-xl-1024-v1-0"];
45
+ export type StabilityImageGenerationModelType = (typeof stabilityImageGenerationModels)[number] | (string & {});
44
46
  export interface StabilityImageGenerationModelSettings extends ImageGenerationModelSettings {
45
47
  api?: ApiConfiguration;
46
- model: string;
48
+ model: StabilityImageGenerationModelType;
47
49
  height?: number;
48
50
  width?: number;
49
51
  cfgScale?: number;
@@ -82,6 +82,12 @@ export class StabilityImageGenerationModel extends AbstractModel {
82
82
  return new StabilityImageGenerationModel(Object.assign({}, this.settings, additionalSettings));
83
83
  }
84
84
  }
85
+ const stabilityImageGenerationModels = [
86
+ "stable-diffusion-v1-5",
87
+ "stable-diffusion-512-v2-1",
88
+ "stable-diffusion-xl-1024-v0-9",
89
+ "stable-diffusion-xl-1024-v1-0",
90
+ ];
85
91
  const stabilityImageGenerationResponseSchema = z.object({
86
92
  artifacts: z.array(z.object({
87
93
  base64: z.string(),
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "modelfusion",
3
3
  "description": "Build multimodal applications, chatbots, and agents with JavaScript and TypeScript.",
4
- "version": "0.30.1",
4
+ "version": "0.32.0",
5
5
  "author": "Lars Grammel",
6
6
  "license": "MIT",
7
7
  "keywords": [
@@ -55,7 +55,7 @@
55
55
  "js-tiktoken": "1.0.7",
56
56
  "nanoid": "3.3.6",
57
57
  "secure-json-parse": "2.7.0",
58
- "zod": "3.22.2",
58
+ "zod": "3.21.4",
59
59
  "zod-to-json-schema": "3.21.4"
60
60
  },
61
61
  "devDependencies": {
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.AlpacaInstructionPromptFormat = void 0;
3
+ exports.mapInstructionPromptToAlpacaFormat = void 0;
4
4
  const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
5
5
  const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
6
6
  /**
@@ -11,23 +11,25 @@ const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a
11
11
  *
12
12
  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
13
13
  */
14
- const AlpacaInstructionPromptFormat = () => ({
15
- stopSequences: [],
16
- format: (instruction) => {
17
- let text = instruction.system ??
18
- (instruction.input != null
19
- ? DEFAULT_SYSTEM_PROMPT_INPUT
20
- : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
21
- text += "\n\n### Instruction:\n";
22
- if (instruction.system != null) {
23
- text += `${instruction.system}\n`;
24
- }
25
- text += instruction.instruction;
26
- if (instruction.input != null) {
27
- text += `\n\n### Input:\n${instruction.input}`;
28
- }
29
- text += "\n\n### Response:\n";
30
- return text;
31
- },
32
- });
33
- exports.AlpacaInstructionPromptFormat = AlpacaInstructionPromptFormat;
14
+ function mapInstructionPromptToAlpacaFormat() {
15
+ return {
16
+ stopSequences: [],
17
+ format: (instruction) => {
18
+ let text = instruction.system ??
19
+ (instruction.input != null
20
+ ? DEFAULT_SYSTEM_PROMPT_INPUT
21
+ : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
22
+ text += "\n\n### Instruction:\n";
23
+ if (instruction.system != null) {
24
+ text += `${instruction.system}\n`;
25
+ }
26
+ text += instruction.instruction;
27
+ if (instruction.input != null) {
28
+ text += `\n\n### Input:\n${instruction.input}`;
29
+ }
30
+ text += "\n\n### Response:\n";
31
+ return text;
32
+ },
33
+ };
34
+ }
35
+ exports.mapInstructionPromptToAlpacaFormat = mapInstructionPromptToAlpacaFormat;
@@ -8,4 +8,4 @@ import { PromptFormat } from "./PromptFormat.js";
8
8
  *
9
9
  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
10
10
  */
11
- export declare const AlpacaInstructionPromptFormat: () => PromptFormat<InstructionPrompt, string>;
11
+ export declare function mapInstructionPromptToAlpacaFormat(): PromptFormat<InstructionPrompt, string>;
@@ -8,22 +8,24 @@ const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a
8
8
  *
9
9
  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
10
10
  */
11
- export const AlpacaInstructionPromptFormat = () => ({
12
- stopSequences: [],
13
- format: (instruction) => {
14
- let text = instruction.system ??
15
- (instruction.input != null
16
- ? DEFAULT_SYSTEM_PROMPT_INPUT
17
- : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
18
- text += "\n\n### Instruction:\n";
19
- if (instruction.system != null) {
20
- text += `${instruction.system}\n`;
21
- }
22
- text += instruction.instruction;
23
- if (instruction.input != null) {
24
- text += `\n\n### Input:\n${instruction.input}`;
25
- }
26
- text += "\n\n### Response:\n";
27
- return text;
28
- },
29
- });
11
+ export function mapInstructionPromptToAlpacaFormat() {
12
+ return {
13
+ stopSequences: [],
14
+ format: (instruction) => {
15
+ let text = instruction.system ??
16
+ (instruction.input != null
17
+ ? DEFAULT_SYSTEM_PROMPT_INPUT
18
+ : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
19
+ text += "\n\n### Instruction:\n";
20
+ if (instruction.system != null) {
21
+ text += `${instruction.system}\n`;
22
+ }
23
+ text += instruction.instruction;
24
+ if (instruction.input != null) {
25
+ text += `\n\n### Input:\n${instruction.input}`;
26
+ }
27
+ text += "\n\n### Response:\n";
28
+ return text;
29
+ },
30
+ };
31
+ }
@@ -1,6 +1,15 @@
1
1
  /**
2
2
  * A single instruction prompt. It can contain an optional system message to define the role and behavior of the language model
3
3
  and an optional input to provide context for the language model
4
+ *
5
+ * @example
6
+ * ```ts
7
+ * {
8
+ * system: "You are a celebrated poet.", // optional
9
+ * instruction: "Write a short story about:",
10
+ * input: "a robot learning to love.", // optional
11
+ * }
12
+ * ```
4
13
  */
5
14
  export type InstructionPrompt = {
6
15
  /**
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Llama2ChatPromptFormat = exports.Llama2InstructionPromptFormat = void 0;
3
+ exports.mapChatPromptToLlama2Format = exports.mapInstructionPromptToLlama2Format = void 0;
4
4
  const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
5
5
  // see https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/generation.py#L44
6
6
  const BEGIN_SEGMENT = "<s>";
@@ -14,46 +14,50 @@ const END_SYSTEM = "\n<</SYS>>\n\n";
14
14
  *
15
15
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
16
16
  */
17
- const Llama2InstructionPromptFormat = () => ({
18
- stopSequences: [END_SEGMENT],
19
- format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
20
- ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
21
- : ""} ${instruction.instruction}${instruction.input != null ? `\n\n${instruction.input}` : ""} ${END_INSTRUCTION}\n`,
22
- });
23
- exports.Llama2InstructionPromptFormat = Llama2InstructionPromptFormat;
17
+ function mapInstructionPromptToLlama2Format() {
18
+ return {
19
+ stopSequences: [END_SEGMENT],
20
+ format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
21
+ ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
22
+ : ""} ${instruction.instruction}${instruction.input != null ? `\n\n${instruction.input}` : ""} ${END_INSTRUCTION}\n`,
23
+ };
24
+ }
25
+ exports.mapInstructionPromptToLlama2Format = mapInstructionPromptToLlama2Format;
24
26
  /**
25
27
  * Formats a chat prompt as a Llama 2 prompt.
26
28
  */
27
- const Llama2ChatPromptFormat = () => ({
28
- format: (chatPrompt) => {
29
- (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
30
- let text = "";
31
- for (let i = 0; i < chatPrompt.length; i++) {
32
- const message = chatPrompt[i];
33
- // system message:
34
- if (i === 0 &&
35
- "system" in message &&
36
- typeof message.system === "string") {
37
- // Separate section for system message to simplify implementation
38
- // (this is slightly different from the original instructions):
39
- text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${message.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`;
40
- continue;
29
+ function mapChatPromptToLlama2Format() {
30
+ return {
31
+ format: (chatPrompt) => {
32
+ (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
33
+ let text = "";
34
+ for (let i = 0; i < chatPrompt.length; i++) {
35
+ const message = chatPrompt[i];
36
+ // system message:
37
+ if (i === 0 &&
38
+ "system" in message &&
39
+ typeof message.system === "string") {
40
+ // Separate section for system message to simplify implementation
41
+ // (this is slightly different from the original instructions):
42
+ text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${message.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`;
43
+ continue;
44
+ }
45
+ // user message
46
+ if ("user" in message) {
47
+ text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${message.user}${END_INSTRUCTION}`;
48
+ continue;
49
+ }
50
+ // ai message:
51
+ if ("ai" in message) {
52
+ text += `${message.ai}${END_SEGMENT}`;
53
+ continue;
54
+ }
55
+ // unsupported message:
56
+ throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
41
57
  }
42
- // user message
43
- if ("user" in message) {
44
- text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${message.user}${END_INSTRUCTION}`;
45
- continue;
46
- }
47
- // ai message:
48
- if ("ai" in message) {
49
- text += `${message.ai}${END_SEGMENT}`;
50
- continue;
51
- }
52
- // unsupported message:
53
- throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
54
- }
55
- return text;
56
- },
57
- stopSequences: [END_SEGMENT],
58
- });
59
- exports.Llama2ChatPromptFormat = Llama2ChatPromptFormat;
58
+ return text;
59
+ },
60
+ stopSequences: [END_SEGMENT],
61
+ };
62
+ }
63
+ exports.mapChatPromptToLlama2Format = mapChatPromptToLlama2Format;
@@ -6,8 +6,8 @@ import { ChatPrompt } from "./chat/ChatPrompt.js";
6
6
  *
7
7
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
8
8
  */
9
- export declare const Llama2InstructionPromptFormat: () => PromptFormat<InstructionPrompt, string>;
9
+ export declare function mapInstructionPromptToLlama2Format(): PromptFormat<InstructionPrompt, string>;
10
10
  /**
11
11
  * Formats a chat prompt as a Llama 2 prompt.
12
12
  */
13
- export declare const Llama2ChatPromptFormat: () => PromptFormat<ChatPrompt, string>;
13
+ export declare function mapChatPromptToLlama2Format(): PromptFormat<ChatPrompt, string>;
@@ -11,44 +11,48 @@ const END_SYSTEM = "\n<</SYS>>\n\n";
11
11
  *
12
12
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
13
13
  */
14
- export const Llama2InstructionPromptFormat = () => ({
15
- stopSequences: [END_SEGMENT],
16
- format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
17
- ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
18
- : ""} ${instruction.instruction}${instruction.input != null ? `\n\n${instruction.input}` : ""} ${END_INSTRUCTION}\n`,
19
- });
14
+ export function mapInstructionPromptToLlama2Format() {
15
+ return {
16
+ stopSequences: [END_SEGMENT],
17
+ format: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
18
+ ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
19
+ : ""} ${instruction.instruction}${instruction.input != null ? `\n\n${instruction.input}` : ""} ${END_INSTRUCTION}\n`,
20
+ };
21
+ }
20
22
  /**
21
23
  * Formats a chat prompt as a Llama 2 prompt.
22
24
  */
23
- export const Llama2ChatPromptFormat = () => ({
24
- format: (chatPrompt) => {
25
- validateChatPrompt(chatPrompt);
26
- let text = "";
27
- for (let i = 0; i < chatPrompt.length; i++) {
28
- const message = chatPrompt[i];
29
- // system message:
30
- if (i === 0 &&
31
- "system" in message &&
32
- typeof message.system === "string") {
33
- // Separate section for system message to simplify implementation
34
- // (this is slightly different from the original instructions):
35
- text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${message.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`;
36
- continue;
25
+ export function mapChatPromptToLlama2Format() {
26
+ return {
27
+ format: (chatPrompt) => {
28
+ validateChatPrompt(chatPrompt);
29
+ let text = "";
30
+ for (let i = 0; i < chatPrompt.length; i++) {
31
+ const message = chatPrompt[i];
32
+ // system message:
33
+ if (i === 0 &&
34
+ "system" in message &&
35
+ typeof message.system === "string") {
36
+ // Separate section for system message to simplify implementation
37
+ // (this is slightly different from the original instructions):
38
+ text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${BEGIN_SYSTEM}${message.system}${END_SYSTEM}${END_INSTRUCTION}${END_SEGMENT}`;
39
+ continue;
40
+ }
41
+ // user message
42
+ if ("user" in message) {
43
+ text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${message.user}${END_INSTRUCTION}`;
44
+ continue;
45
+ }
46
+ // ai message:
47
+ if ("ai" in message) {
48
+ text += `${message.ai}${END_SEGMENT}`;
49
+ continue;
50
+ }
51
+ // unsupported message:
52
+ throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
37
53
  }
38
- // user message
39
- if ("user" in message) {
40
- text += `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${message.user}${END_INSTRUCTION}`;
41
- continue;
42
- }
43
- // ai message:
44
- if ("ai" in message) {
45
- text += `${message.ai}${END_SEGMENT}`;
46
- continue;
47
- }
48
- // unsupported message:
49
- throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
50
- }
51
- return text;
52
- },
53
- stopSequences: [END_SEGMENT],
54
- });
54
+ return text;
55
+ },
56
+ stopSequences: [END_SEGMENT],
57
+ };
58
+ }
@@ -1,11 +1,11 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.TextChatPromptFormat = exports.TextInstructionPromptFormat = void 0;
3
+ exports.mapChatPromptToTextFormat = exports.mapInstructionPromptToTextFormat = void 0;
4
4
  const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
5
5
  /**
6
6
  * Formats an instruction prompt as a basic text prompt.
7
7
  */
8
- const TextInstructionPromptFormat = () => ({
8
+ const mapInstructionPromptToTextFormat = () => ({
9
9
  stopSequences: [],
10
10
  format: (instruction) => {
11
11
  let text = "";
@@ -19,14 +19,14 @@ const TextInstructionPromptFormat = () => ({
19
19
  return text;
20
20
  },
21
21
  });
22
- exports.TextInstructionPromptFormat = TextInstructionPromptFormat;
22
+ exports.mapInstructionPromptToTextFormat = mapInstructionPromptToTextFormat;
23
23
  /**
24
24
  * Formats a chat prompt as a basic text prompt.
25
25
  *
26
26
  * @param user The label of the user in the chat.
27
27
  * @param ai The name of the AI in the chat.
28
28
  */
29
- const TextChatPromptFormat = ({ user, ai }) => ({
29
+ const mapChatPromptToTextFormat = ({ user, ai }) => ({
30
30
  format: (chatPrompt) => {
31
31
  (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
32
32
  let text = "";
@@ -58,4 +58,4 @@ const TextChatPromptFormat = ({ user, ai }) => ({
58
58
  },
59
59
  stopSequences: [`\n${user}:`],
60
60
  });
61
- exports.TextChatPromptFormat = TextChatPromptFormat;
61
+ exports.mapChatPromptToTextFormat = mapChatPromptToTextFormat;
@@ -4,14 +4,14 @@ import { ChatPrompt } from "./chat/ChatPrompt.js";
4
4
  /**
5
5
  * Formats an instruction prompt as a basic text prompt.
6
6
  */
7
- export declare const TextInstructionPromptFormat: () => PromptFormat<InstructionPrompt, string>;
7
+ export declare const mapInstructionPromptToTextFormat: () => PromptFormat<InstructionPrompt, string>;
8
8
  /**
9
9
  * Formats a chat prompt as a basic text prompt.
10
10
  *
11
11
  * @param user The label of the user in the chat.
12
12
  * @param ai The name of the AI in the chat.
13
13
  */
14
- export declare const TextChatPromptFormat: ({ user, ai, }: {
14
+ export declare const mapChatPromptToTextFormat: ({ user, ai, }: {
15
15
  user: string;
16
16
  ai: string;
17
17
  }) => PromptFormat<ChatPrompt, string>;