modelfusion 0.1.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -377,6 +377,18 @@ const { chunks } = await retrieveTextChunks(
  - [Memory](https://modelfusion.dev/integration/vector-index/memory)
  - [Pinecone](https://modelfusion.dev/integration/vector-index/pinecone)
 
+ ### Prompt Mappings
+
+ Use higher-level prompts that are mapped to model-specific prompts.
+
+ | Model | Instruction Prompt | Chat Prompt |
+ | ------------ | ------------------ | ----------- |
+ | OpenAI | ✅ | ✅ |
+ | Llama 2 | ✅ | ✅ |
+ | Alpaca | ✅ | ❌ |
+ | Vicuna | ❌ | ✅ |
+ | Generic Text | ✅ | ✅ |
+
  ## Documentation
 
  - [Guide](https://modelfusion.dev/guide)
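
Every prompt mapping added or updated in this release follows the same shape. The following is a rough TypeScript sketch inferred from the mapping factories and the `PromptMapping<..., string>` declarations further down in this diff; the actual `PromptMapping.d.ts` is not included here, so the exact property layout and type parameter names are assumptions:

```ts
// Approximate shape of a prompt mapping, inferred from the factories in this release.
// NOTE: this is a sketch; the real definition lives in prompt/PromptMapping.d.ts,
// which is not part of this diff.
interface PromptMapping<SOURCE_PROMPT, TARGET_PROMPT> {
  // Converts a higher-level prompt (instruction or chat) into the model-specific prompt.
  map: (sourcePrompt: SOURCE_PROMPT) => TARGET_PROMPT;
  // Stop tokens that end generation for this prompt format (e.g. "\nUSER:" for Vicuna).
  stopTokens: string[];
}

// The table above corresponds to factories such as InstructionToOpenAIChatPromptMapping,
// InstructionToLlama2PromptMapping / ChatToLlama2PromptMapping, InstructionToAlpacaPromptMapping,
// ChatToVicunaPromptMapping, and InstructionToTextPromptMapping / ChatToTextPromptMapping.
```
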
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "modelfusion",
  "description": "Build AI applications, chatbots, and agents with JavaScript and TypeScript.",
- "version": "0.1.1",
+ "version": "0.2.0",
  "author": "Lars Grammel",
  "license": "MIT",
  "keywords": [
@@ -65,10 +65,10 @@
  "@typescript-eslint/parser": "^6.1.0",
  "copyfiles": "2.4.1",
  "eslint": "^8.45.0",
- "eslint-config-prettier": "8.9.0",
+ "eslint-config-prettier": "8.10.0",
  "husky": "^8.0.3",
  "lint-staged": "13.2.3",
- "prettier": "3.0.0",
+ "prettier": "3.0.1",
  "rimraf": "5.0.1",
  "typescript": "5.1.6",
  "zod": "3.21.4",
package/prompt/AlpacaPromptMapping.cjs ADDED
@@ -0,0 +1,33 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.InstructionToAlpacaPromptMapping = void 0;
+ const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
+ const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
+ /**
+  * Maps an instruction prompt to the Alpaca prompt format.
+  *
+  * If the instruction has a system prompt, it overrides the default system prompt
+  * (which can impact the results, because the model may be trained on the default system prompt).
+  *
+  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
+  */
+ const InstructionToAlpacaPromptMapping = () => ({
+     stopTokens: [],
+     map: (instruction) => {
+         let text = instruction.system ??
+             (instruction.input != null
+                 ? DEFAULT_SYSTEM_PROMPT_INPUT
+                 : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
+         text += "\n\n### Instruction:\n";
+         if (instruction.system != null) {
+             text += `${instruction.system}\n`;
+         }
+         text += instruction.instruction;
+         if (instruction.input != null) {
+             text += `\n\n### Input:\n${instruction.input}`;
+         }
+         text += "\n\n### Response:\n";
+         return text;
+     },
+ });
+ exports.InstructionToAlpacaPromptMapping = InstructionToAlpacaPromptMapping;
package/prompt/AlpacaPromptMapping.d.ts ADDED
@@ -0,0 +1,11 @@
+ import { InstructionPrompt } from "./InstructionPrompt.js";
+ import { PromptMapping } from "./PromptMapping.js";
+ /**
+  * Maps an instruction prompt to the Alpaca prompt format.
+  *
+  * If the instruction has a system prompt, it overrides the default system prompt
+  * (which can impact the results, because the model may be trained on the default system prompt).
+  *
+  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
+  */
+ export declare const InstructionToAlpacaPromptMapping: () => PromptMapping<InstructionPrompt, string>;
package/prompt/AlpacaPromptMapping.js ADDED
@@ -0,0 +1,29 @@
+ const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
+ const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
+ /**
+  * Maps an instruction prompt to the Alpaca prompt format.
+  *
+  * If the instruction has a system prompt, it overrides the default system prompt
+  * (which can impact the results, because the model may be trained on the default system prompt).
+  *
+  * @see https://github.com/tatsu-lab/stanford_alpaca#data-release
+  */
+ export const InstructionToAlpacaPromptMapping = () => ({
+     stopTokens: [],
+     map: (instruction) => {
+         let text = instruction.system ??
+             (instruction.input != null
+                 ? DEFAULT_SYSTEM_PROMPT_INPUT
+                 : DEFAULT_SYSTEM_PROMPT_NO_INPUT);
+         text += "\n\n### Instruction:\n";
+         if (instruction.system != null) {
+             text += `${instruction.system}\n`;
+         }
+         text += instruction.instruction;
+         if (instruction.input != null) {
+             text += `\n\n### Input:\n${instruction.input}`;
+         }
+         text += "\n\n### Response:\n";
+         return text;
+     },
+ });
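
To make the Alpaca format concrete, here is a small sketch of what the mapping above produces for an instruction prompt with an `input` and no `system` message. The `modelfusion` import path is an assumption; the `map` behavior is taken directly from the code above.

```ts
import { InstructionToAlpacaPromptMapping } from "modelfusion"; // import path assumed

const alpacaPrompt = InstructionToAlpacaPromptMapping().map({
  instruction: "Summarize the following text in one sentence.",
  input: "ModelFusion is a TypeScript library for building AI applications.",
});

// alpacaPrompt (no system message, so DEFAULT_SYSTEM_PROMPT_INPUT is used):
//
// Below is an instruction that describes a task, paired with an input that provides
// further context. Write a response that appropriately completes the request.
//
// ### Instruction:
// Summarize the following text in one sentence.
//
// ### Input:
// ModelFusion is a TypeScript library for building AI applications.
//
// ### Response:
```
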
package/prompt/InstructionPrompt.d.ts CHANGED
@@ -1,7 +1,19 @@
  /**
-  * A single instruction prompt. It can contain an optional system message to provide context for the language model.
+  * A single instruction prompt. It can contain an optional system message to define the role and behavior of the language model
+  * and an optional input to provide context for the language model.
   */
  export type InstructionPrompt = {
+     /**
+      * Optional system message to provide context for the language model. Note that for some models,
+      * changing the system message can impact the results, because the model may be trained on the default system message.
+      */
      system?: string;
+     /**
+      * The instruction for the model.
+      */
      instruction: string;
+     /**
+      * Optional additional input or context, e.g. the content from which information should be extracted.
+      */
+     input?: string;
  };
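
A minimal example of the extended `InstructionPrompt` shape, using a hypothetical extraction task (the `modelfusion` import path is an assumption):

```ts
import { InstructionPrompt } from "modelfusion"; // import path assumed

const extractNames: InstructionPrompt = {
  // Optional: replaces the mapping's default system message where supported.
  system: "You are an information extraction assistant.",
  instruction: "List all person names mentioned in the input.",
  // Optional: the content to run the instruction against.
  input: "Yesterday, Ada Lovelace met Charles Babbage in London.",
};
```
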
package/prompt/Llama2PromptMapping.cjs CHANGED
@@ -18,7 +18,7 @@ const InstructionToLlama2PromptMapping = () => ({
      stopTokens: [END_SEGMENT],
      map: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
          ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
-         : ""} ${instruction.instruction} ${END_INSTRUCTION}\n`,
+         : ""} ${instruction.instruction}${instruction.input != null ? `\n\n${instruction.input}` : ""} ${END_INSTRUCTION}\n`,
  });
  exports.InstructionToLlama2PromptMapping = InstructionToLlama2PromptMapping;
  const ChatToLlama2PromptMapping = () => ({
package/prompt/Llama2PromptMapping.js CHANGED
@@ -15,7 +15,7 @@ export const InstructionToLlama2PromptMapping = () => ({
      stopTokens: [END_SEGMENT],
      map: (instruction) => `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${instruction.system != null
          ? ` ${BEGIN_SYSTEM}${instruction.system}${END_SYSTEM}`
-         : ""} ${instruction.instruction} ${END_INSTRUCTION}\n`,
+         : ""} ${instruction.instruction}${instruction.input != null ? `\n\n${instruction.input}` : ""} ${END_INSTRUCTION}\n`,
  });
  export const ChatToLlama2PromptMapping = () => ({
      map: (chatPrompt) => {
package/prompt/OpenAIChatPromptMapping.cjs CHANGED
@@ -15,6 +15,12 @@ const InstructionToOpenAIChatPromptMapping = () => ({
              role: "user",
              content: instruction.instruction,
          });
+         if (instruction.input != null) {
+             messages.push({
+                 role: "user",
+                 content: instruction.input,
+             });
+         }
          return messages;
      },
      stopTokens: [],
package/prompt/OpenAIChatPromptMapping.js CHANGED
@@ -12,6 +12,12 @@ export const InstructionToOpenAIChatPromptMapping = () => ({
              role: "user",
              content: instruction.instruction,
          });
+         if (instruction.input != null) {
+             messages.push({
+                 role: "user",
+                 content: instruction.input,
+             });
+         }
          return messages;
      },
      stopTokens: [],
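
The effect of the change above, sketched for an instruction prompt without a `system` message: the `input` is now emitted as a second user message. This only illustrates the pushes visible in the hunk; the rest of the mapping function is outside the diff.

```ts
// Hypothetical instruction prompt:
const prompt = {
  instruction: "Summarize the following text.",
  input: "ModelFusion is a TypeScript library for building AI applications.",
};

// Messages produced by InstructionToOpenAIChatPromptMapping().map(prompt),
// per the hunk above (no system message in this example):
const messages = [
  { role: "user", content: "Summarize the following text." },
  { role: "user", content: "ModelFusion is a TypeScript library for building AI applications." },
];
```
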
package/prompt/TextPromptMapping.cjs CHANGED
@@ -4,9 +4,17 @@ exports.ChatToTextPromptMapping = exports.InstructionToTextPromptMapping = void
  const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
  const InstructionToTextPromptMapping = () => ({
      stopTokens: [],
-     map: (instruction) => instruction.system != null
-         ? `${instruction.system}\n\n${instruction.instruction}`
-         : instruction.instruction,
+     map: (instruction) => {
+         let text = "";
+         if (instruction.system != null) {
+             text += `${instruction.system}\n\n`;
+         }
+         text += instruction.instruction;
+         if (instruction.input != null) {
+             text += `\n\n${instruction.input}`;
+         }
+         return text;
+     },
  });
  exports.InstructionToTextPromptMapping = InstructionToTextPromptMapping;
  /**
package/prompt/TextPromptMapping.js CHANGED
@@ -1,9 +1,17 @@
  import { validateChatPrompt } from "./chat/validateChatPrompt.js";
  export const InstructionToTextPromptMapping = () => ({
      stopTokens: [],
-     map: (instruction) => instruction.system != null
-         ? `${instruction.system}\n\n${instruction.instruction}`
-         : instruction.instruction,
+     map: (instruction) => {
+         let text = "";
+         if (instruction.system != null) {
+             text += `${instruction.system}\n\n`;
+         }
+         text += instruction.instruction;
+         if (instruction.input != null) {
+             text += `\n\n${instruction.input}`;
+         }
+         return text;
+     },
  });
  /**
   * A mapping from a chat prompt to a text prompt.
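
A worked example of the reworked generic text mapping above: `system`, `instruction`, and `input` are joined with blank lines. The `modelfusion` import path is an assumption; the output follows directly from the new `map` function.

```ts
import { InstructionToTextPromptMapping } from "modelfusion"; // import path assumed

const text = InstructionToTextPromptMapping().map({
  system: "You are a helpful assistant.",
  instruction: "Summarize the following text in one sentence.",
  input: "ModelFusion is a TypeScript library for building AI applications.",
});

// text ===
//   "You are a helpful assistant.\n\n" +
//   "Summarize the following text in one sentence.\n\n" +
//   "ModelFusion is a TypeScript library for building AI applications."
```
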
package/prompt/VicunaPromptMapping.cjs ADDED
@@ -0,0 +1,55 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ChatToVicunaPromptMapping = void 0;
+ const validateChatPrompt_js_1 = require("./chat/validateChatPrompt.cjs");
+ const DEFAULT_SYSTEM_PROMPT = "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.";
+ /**
+  * A mapping from a chat prompt to a Vicuna prompt.
+  *
+  * Overriding the system message in the first chat message can affect model responses.
+  *
+  * Vicuna prompt template:
+  * ```
+  * A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
+  *
+  * USER: {prompt}
+  * ASSISTANT:
+  * ```
+  */
+ const ChatToVicunaPromptMapping = () => ({
+     map: (chatPrompt) => {
+         (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
+         let text = "";
+         for (let i = 0; i < chatPrompt.length; i++) {
+             const message = chatPrompt[i];
+             // system message:
+             if (i === 0 &&
+                 "system" in message &&
+                 typeof message.system === "string") {
+                 text += `${message.system}\n\n`;
+                 continue;
+             }
+             // first message was not a system message:
+             if (i === 0) {
+                 text += `${DEFAULT_SYSTEM_PROMPT}\n\n`;
+             }
+             // user message
+             if ("user" in message) {
+                 text += `USER: ${message.user}\n`;
+                 continue;
+             }
+             // ai message:
+             if ("ai" in message) {
+                 text += `ASSISTANT:\n${message.ai}\n`;
+                 continue;
+             }
+             // unsupported message:
+             throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+         }
+         // AI message prefix:
+         text += `ASSISTANT: `;
+         return text;
+     },
+     stopTokens: [`\nUSER:`],
+ });
+ exports.ChatToVicunaPromptMapping = ChatToVicunaPromptMapping;
package/prompt/VicunaPromptMapping.d.ts ADDED
@@ -0,0 +1,16 @@
+ import { PromptMapping } from "./PromptMapping.js";
+ import { ChatPrompt } from "./chat/ChatPrompt.js";
+ /**
+  * A mapping from a chat prompt to a Vicuna prompt.
+  *
+  * Overriding the system message in the first chat message can affect model responses.
+  *
+  * Vicuna prompt template:
+  * ```
+  * A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
+  *
+  * USER: {prompt}
+  * ASSISTANT:
+  * ```
+  */
+ export declare const ChatToVicunaPromptMapping: () => PromptMapping<ChatPrompt, string>;
package/prompt/VicunaPromptMapping.js ADDED
@@ -0,0 +1,51 @@
+ import { validateChatPrompt } from "./chat/validateChatPrompt.js";
+ const DEFAULT_SYSTEM_PROMPT = "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.";
+ /**
+  * A mapping from a chat prompt to a Vicuna prompt.
+  *
+  * Overriding the system message in the first chat message can affect model responses.
+  *
+  * Vicuna prompt template:
+  * ```
+  * A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
+  *
+  * USER: {prompt}
+  * ASSISTANT:
+  * ```
+  */
+ export const ChatToVicunaPromptMapping = () => ({
+     map: (chatPrompt) => {
+         validateChatPrompt(chatPrompt);
+         let text = "";
+         for (let i = 0; i < chatPrompt.length; i++) {
+             const message = chatPrompt[i];
+             // system message:
+             if (i === 0 &&
+                 "system" in message &&
+                 typeof message.system === "string") {
+                 text += `${message.system}\n\n`;
+                 continue;
+             }
+             // first message was not a system message:
+             if (i === 0) {
+                 text += `${DEFAULT_SYSTEM_PROMPT}\n\n`;
+             }
+             // user message
+             if ("user" in message) {
+                 text += `USER: ${message.user}\n`;
+                 continue;
+             }
+             // ai message:
+             if ("ai" in message) {
+                 text += `ASSISTANT:\n${message.ai}\n`;
+                 continue;
+             }
+             // unsupported message:
+             throw new Error(`Unsupported message: ${JSON.stringify(message)}`);
+         }
+         // AI message prefix:
+         text += `ASSISTANT: `;
+         return text;
+     },
+     stopTokens: [`\nUSER:`],
+ });
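
A sketch of how the Vicuna chat mapping lays out a conversation. The chat message shapes (`{ system }`, `{ user }`, `{ ai }`) are inferred from the property checks in the code above, the example assumes the conversation passes `validateChatPrompt`, and the `modelfusion` import path is an assumption.

```ts
import { ChatToVicunaPromptMapping } from "modelfusion"; // import path assumed

const vicunaPrompt = ChatToVicunaPromptMapping().map([
  { system: "You are a concise assistant." },
  { user: "What is ModelFusion?" },
  { ai: "A TypeScript library for building AI applications." },
  { user: "Does it support Vicuna-style prompts?" },
]);

// vicunaPrompt:
//
// You are a concise assistant.
//
// USER: What is ModelFusion?
// ASSISTANT:
// A TypeScript library for building AI applications.
// USER: Does it support Vicuna-style prompts?
// ASSISTANT:
// (generation continues from here; "\nUSER:" is the stop token)
```
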
package/prompt/index.cjs CHANGED
@@ -14,12 +14,14 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
+ __exportStar(require("./AlpacaPromptMapping.cjs"), exports);
  __exportStar(require("./InstructionPrompt.cjs"), exports);
  __exportStar(require("./Llama2PromptMapping.cjs"), exports);
  __exportStar(require("./OpenAIChatPromptMapping.cjs"), exports);
  __exportStar(require("./PromptMapping.cjs"), exports);
  __exportStar(require("./PromptMappingTextGenerationModel.cjs"), exports);
  __exportStar(require("./TextPromptMapping.cjs"), exports);
+ __exportStar(require("./VicunaPromptMapping.cjs"), exports);
  __exportStar(require("./chat/ChatPrompt.cjs"), exports);
  __exportStar(require("./chat/trimChatPrompt.cjs"), exports);
  __exportStar(require("./chat/validateChatPrompt.cjs"), exports);
package/prompt/index.d.ts CHANGED
@@ -1,9 +1,11 @@
+ export * from "./AlpacaPromptMapping.js";
  export * from "./InstructionPrompt.js";
  export * from "./Llama2PromptMapping.js";
  export * from "./OpenAIChatPromptMapping.js";
  export * from "./PromptMapping.js";
  export * from "./PromptMappingTextGenerationModel.js";
  export * from "./TextPromptMapping.js";
+ export * from "./VicunaPromptMapping.js";
  export * from "./chat/ChatPrompt.js";
  export * from "./chat/trimChatPrompt.js";
  export * from "./chat/validateChatPrompt.js";
package/prompt/index.js CHANGED
@@ -1,9 +1,11 @@
+ export * from "./AlpacaPromptMapping.js";
  export * from "./InstructionPrompt.js";
  export * from "./Llama2PromptMapping.js";
  export * from "./OpenAIChatPromptMapping.js";
  export * from "./PromptMapping.js";
  export * from "./PromptMappingTextGenerationModel.js";
  export * from "./TextPromptMapping.js";
+ export * from "./VicunaPromptMapping.js";
  export * from "./chat/ChatPrompt.js";
  export * from "./chat/trimChatPrompt.js";
  export * from "./chat/validateChatPrompt.js";