ai 2.1.25 → 2.1.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,11 +1,11 @@
  # Vercel AI SDK

- The Vercel AI SDK is **a library for building edge-ready AI-powered streaming text and chat UIs**.
+ The Vercel AI SDK is **a library for building AI-powered streaming text and chat UIs**.

  ## Features

- - [SWR](https://swr.vercel.app)-powered React, Svelte and Vue helpers for streaming text responses and building chat and completion UIs
- - First-class support for [LangChain](js.langchain.com/docs) and [OpenAI](https://openai.com), [Anthropic](https://www.anthropic.com), and [Hugging Face](https://huggingface.co)
+ - [SWR](https://swr.vercel.app)-powered React, Svelte, Vue and Solid helpers for streaming text responses and building chat and completion UIs
+ - First-class support for [LangChain](js.langchain.com/docs) and [OpenAI](https://openai.com), [Anthropic](https://www.anthropic.com), [Cohere](https://cohere.com) and [Hugging Face](https://huggingface.co)
  - Node.js, Serverless, and [Edge Runtime](https://edge-runtime.vercel.app/) support
  - Callbacks for saving completed streaming responses to a database (in the same request)

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai",
- "version": "2.1.25",
+ "version": "2.1.26",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -79,8 +79,8 @@
  "ts-jest": "29.0.3",
  "tsup": "^6.7.0",
  "typescript": "5.1.3",
- "@vercel/ai-tsconfig": "0.0.0",
- "eslint-config-vercel-ai": "0.0.0"
+ "eslint-config-vercel-ai": "0.0.0",
+ "@vercel/ai-tsconfig": "0.0.0"
  },
  "peerDependencies": {
  "react": "^18.2.0",
@@ -33,5 +33,11 @@ declare function experimental_buildStarChatBetaPrompt(messages: Pick<Message, 'c
  * @see https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5
  */
  declare function experimental_buildOpenAssistantPrompt(messages: Pick<Message, 'content' | 'role'>[]): string;
+ /**
+ * A prompt constructor for HuggingFace LLama 2 chat models.
+ * Does not support `function` messages.
+ * @see https://huggingface.co/meta-llama/Llama-2-70b-chat-hf and https://huggingface.co/blog/llama2#how-to-prompt-llama-2
+ */
+ declare function experimental_buildLlama2Prompt(messages: Pick<Message, 'content' | 'role'>[]): string;

- export { experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
+ export { experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
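
The hunk above adds experimental_buildLlama2Prompt to the prompts type declarations. A minimal usage sketch, assuming the prompts subpath export ('ai/prompts') and a Message-shaped input like the other prompt constructors; the messages themselves are illustrative:

    import { experimental_buildLlama2Prompt } from 'ai/prompts';

    // Illustrative chat history; only `content` and `role` are read.
    const prompt = experimental_buildLlama2Prompt([
      { role: 'system', content: 'You are a concise assistant.' },
      { role: 'user', content: 'What is an LLM?' },
    ]);

    // Roughly: "<s>[INST] <<SYS>>\nYou are a concise assistant.\n<</SYS>>\n\nWhat is an LLM? [/INST]"
    console.log(prompt);

Per the JSDoc, the system message is only honored as the first entry, and `function` messages throw.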
@@ -20,6 +20,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // prompts/index.ts
  var prompts_exports = {};
  __export(prompts_exports, {
+ experimental_buildLlama2Prompt: () => experimental_buildLlama2Prompt,
  experimental_buildOpenAssistantPrompt: () => experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt: () => experimental_buildStarChatBetaPrompt
  });
@@ -58,8 +59,31 @@ function experimental_buildOpenAssistantPrompt(messages) {
  }
  }).join("") + "<|assistant|>";
  }
+ function experimental_buildLlama2Prompt(messages) {
+ const startPrompt = `<s>[INST] `;
+ const endPrompt = ` [/INST]`;
+ const conversation = messages.map(({ content, role }, index) => {
+ if (role === "user") {
+ return content.trim();
+ } else if (role === "assistant") {
+ return ` [/INST] ${content}</s><s>[INST] `;
+ } else if (role === "function") {
+ throw new Error("Llama 2 does not support function calls.");
+ } else if (role === "system" && index === 0) {
+ return `<<SYS>>
+ ${content}
+ <</SYS>>
+
+ `;
+ } else {
+ throw new Error(`Invalid message role: ${role}`);
+ }
+ });
+ return startPrompt + conversation.join("") + endPrompt;
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ experimental_buildLlama2Prompt,
  experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt
  });
@@ -31,7 +31,30 @@ function experimental_buildOpenAssistantPrompt(messages) {
  }
  }).join("") + "<|assistant|>";
  }
+ function experimental_buildLlama2Prompt(messages) {
+ const startPrompt = `<s>[INST] `;
+ const endPrompt = ` [/INST]`;
+ const conversation = messages.map(({ content, role }, index) => {
+ if (role === "user") {
+ return content.trim();
+ } else if (role === "assistant") {
+ return ` [/INST] ${content}</s><s>[INST] `;
+ } else if (role === "function") {
+ throw new Error("Llama 2 does not support function calls.");
+ } else if (role === "system" && index === 0) {
+ return `<<SYS>>
+ ${content}
+ <</SYS>>
+
+ `;
+ } else {
+ throw new Error(`Invalid message role: ${role}`);
+ }
+ });
+ return startPrompt + conversation.join("") + endPrompt;
+ }
  export {
+ experimental_buildLlama2Prompt,
  experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt
  };
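
For context on how the new constructor is meant to be consumed, a hedged sketch of a streaming route handler that pairs it with Hugging Face text generation. The model id, handler shape, and parameters are illustrative assumptions, not part of this diff; HfInference, HuggingFaceStream, and StreamingTextResponse are the existing @huggingface/inference and ai APIs:

    import { HfInference } from '@huggingface/inference';
    import { HuggingFaceStream, StreamingTextResponse } from 'ai';
    import { experimental_buildLlama2Prompt } from 'ai/prompts';

    const hf = new HfInference(process.env.HUGGINGFACE_API_KEY);

    export async function POST(req: Request) {
      const { messages } = await req.json();

      // Collapse the chat history into a single Llama 2 prompt string.
      const response = hf.textGenerationStream({
        model: 'meta-llama/Llama-2-70b-chat-hf', // illustrative model id
        inputs: experimental_buildLlama2Prompt(messages),
        parameters: { max_new_tokens: 200 },
      });

      // Adapt the token stream and return it as a streaming text response.
      const stream = HuggingFaceStream(response);
      return new StreamingTextResponse(stream);
    }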