@ai-sdk/vercel 2.0.17 → 2.0.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
  # @ai-sdk/vercel

+ ## 2.0.19
+
+ ### Patch Changes
+
+ - 4de5a1d: chore: excluded tests from src folder in npm package
+ - Updated dependencies [4de5a1d]
+   - @ai-sdk/openai-compatible@2.0.18
+   - @ai-sdk/provider@3.0.5
+   - @ai-sdk/provider-utils@4.0.9
+
+ ## 2.0.18
+
+ ### Patch Changes
+
+ - 2b8369d: chore: add docs to package dist
+
  ## 2.0.17

  ### Patch Changes
package/dist/index.js CHANGED
@@ -32,7 +32,7 @@ var import_openai_compatible = require("@ai-sdk/openai-compatible");
  var import_provider_utils = require("@ai-sdk/provider-utils");

  // src/version.ts
- var VERSION = true ? "2.0.17" : "0.0.0-test";
+ var VERSION = true ? "2.0.19" : "0.0.0-test";

  // src/vercel-provider.ts
  function createVercel(options = {}) {
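The literal `true` in the compiled ternary above is what a build-time define typically leaves behind once the bundler has folded the version check. As a rough illustration only — the `__PACKAGE_VERSION__` identifier and the tsup config below are assumptions, not taken from this package's sources — the pattern usually looks like this:

```ts
// src/version.ts (hypothetical sketch): fall back to a test marker when the
// build-time constant is not defined, e.g. when the file runs under vitest.
declare const __PACKAGE_VERSION__: string | undefined; // injected by the bundler (assumed name)

export const VERSION =
  typeof __PACKAGE_VERSION__ !== 'undefined' ? __PACKAGE_VERSION__ : '0.0.0-test';
```

```ts
// tsup.config.ts (hypothetical sketch): inline the version from package.json so
// the published dist/ files carry a concrete string literal.
import { defineConfig } from 'tsup';
import pkg from './package.json'; // requires "resolveJsonModule": true

export default defineConfig({
  entry: ['src/index.ts'],
  format: ['cjs', 'esm'],
  dts: true,
  define: {
    __PACKAGE_VERSION__: JSON.stringify(pkg.version),
  },
});
```

After the bundler substitutes the constant and folds the `typeof` check, the emitted code reduces to the `true ? "2.0.19" : "0.0.0-test"` expression seen in both `dist/index.js` and `dist/index.mjs`.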
package/dist/index.mjs CHANGED
@@ -10,7 +10,7 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "2.0.17" : "0.0.0-test";
+ var VERSION = true ? "2.0.19" : "0.0.0-test";

  // src/vercel-provider.ts
  function createVercel(options = {}) {
@@ -0,0 +1,129 @@
+ ---
+ title: Vercel
+ description: Learn how to use Vercel's v0 models with the AI SDK.
+ ---
+
+ # Vercel Provider
+
+ The [Vercel](https://vercel.com) provider gives you access to the [v0 API](https://v0.app/docs/api/model), designed for building modern web applications. The v0 models support text and image inputs and provide fast streaming responses.
+
+ You can create your Vercel API key at [v0.dev](https://v0.dev/chat/settings/keys).
+
+ <Note>
+   The v0 API is currently in beta and requires a Premium or Team plan with
+   usage-based billing enabled. For details, visit the [pricing
+   page](https://v0.dev/pricing). To request a higher limit, contact Vercel at
+   support@v0.dev.
+ </Note>
+
+ ## Features
+
+ - **Framework aware completions**: Evaluated on modern stacks like Next.js and Vercel
+ - **Auto-fix**: Identifies and corrects common coding issues during generation
+ - **Quick edit**: Streams inline edits as they're available
+ - **Multimodal**: Supports both text and image inputs
+
+ ## Setup
+
+ The Vercel provider is available via the `@ai-sdk/vercel` module. You can install it with:
+
+ <Tabs items={['pnpm', 'npm', 'yarn', 'bun']}>
+   <Tab>
+     <Snippet text="pnpm add @ai-sdk/vercel" dark />
+   </Tab>
+   <Tab>
+     <Snippet text="npm install @ai-sdk/vercel" dark />
+   </Tab>
+   <Tab>
+     <Snippet text="yarn add @ai-sdk/vercel" dark />
+   </Tab>
+
+   <Tab>
+     <Snippet text="bun add @ai-sdk/vercel" dark />
+   </Tab>
+ </Tabs>
+
+ ## Provider Instance
+
+ You can import the default provider instance `vercel` from `@ai-sdk/vercel`:
+
+ ```ts
+ import { vercel } from '@ai-sdk/vercel';
+ ```
+
+ If you need a customized setup, you can import `createVercel` from `@ai-sdk/vercel` and create a provider instance with your settings:
+
+ ```ts
+ import { createVercel } from '@ai-sdk/vercel';
+
+ const vercel = createVercel({
+   apiKey: process.env.VERCEL_API_KEY ?? '',
+ });
+ ```
+
+ You can use the following optional settings to customize the Vercel provider instance:
+
+ - **baseURL** _string_
+
+   Use a different URL prefix for API calls. The default prefix is `https://api.v0.dev/v1`.
+
+ - **apiKey** _string_
+
+   API key that is being sent using the `Authorization` header. It defaults to
+   the `VERCEL_API_KEY` environment variable.
+
+ - **headers** _Record&lt;string,string&gt;_
+
+   Custom headers to include in the requests.
+
+ - **fetch** _(input: RequestInfo, init?: RequestInit) => Promise&lt;Response&gt;_
+
+   Custom [fetch](https://developer.mozilla.org/en-US/docs/Web/API/fetch) implementation.
+   Defaults to the global `fetch` function.
+   You can use it as a middleware to intercept requests,
+   or to provide a custom fetch implementation for e.g. testing.
+
+ ## Language Models
+
+ You can create language models using a provider instance. The first argument is the model ID, for example:
+
+ ```ts
+ import { vercel } from '@ai-sdk/vercel';
+ import { generateText } from 'ai';
+
+ const { text } = await generateText({
+   model: vercel('v0-1.0-md'),
+   prompt: 'Create a Next.js AI chatbot',
+ });
+ ```
+
+ Vercel language models can also be used in the `streamText` function (see [AI SDK Core](/docs/ai-sdk-core)).
+
+ ## Models
+
+ ### v0-1.5-md
+
+ The `v0-1.5-md` model is for everyday tasks and UI generation.
+
+ ### v0-1.5-lg
+
+ The `v0-1.5-lg` model is for advanced thinking or reasoning.
+
+ ### v0-1.0-md (legacy)
+
+ The `v0-1.0-md` model is the legacy model served by the v0 API.
+
+ All v0 models have the following capabilities:
+
+ - Supports text and image inputs (multimodal)
+ - Supports function/tool calls
+ - Streaming responses with low latency
+ - Optimized for frontend and full-stack web development
+
+ ## Model Capabilities
+
+ | Model       | Image Input         | Object Generation   | Tool Usage          | Tool Streaming      |
+ | ----------- | ------------------- | ------------------- | ------------------- | ------------------- |
+ | `v0-1.5-md` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
+ | `v0-1.5-lg` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
+ | `v0-1.0-md` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
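The docs added above list the optional `createVercel` settings (`baseURL`, `apiKey`, `headers`, `fetch`) and note that v0 models also work with `streamText`, but they do not show the two together. A minimal sketch combining them, assuming only the documented options and the AI SDK's `streamText` API; the logging wrapper and the extra header name are illustrative, not part of the package:

```ts
import { createVercel } from '@ai-sdk/vercel';
import { streamText } from 'ai';

// Illustrative fetch wrapper: log each request before delegating to the global fetch.
const loggingFetch: typeof fetch = async (input, init) => {
  console.log('v0 API request:', input);
  return fetch(input, init);
};

const vercel = createVercel({
  apiKey: process.env.VERCEL_API_KEY ?? '',
  headers: { 'x-example-header': 'demo' }, // hypothetical header, for illustration only
  fetch: loggingFetch,
});

const result = streamText({
  model: vercel('v0-1.5-md'),
  prompt: 'Create a Next.js AI chatbot',
});

// streamText returns immediately; the text arrives incrementally on textStream.
for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```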
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@ai-sdk/vercel",
-   "version": "2.0.17",
+   "version": "2.0.19",
    "license": "Apache-2.0",
    "sideEffects": false,
    "main": "./dist/index.js",
@@ -8,10 +8,18 @@
    "types": "./dist/index.d.ts",
    "files": [
      "dist/**/*",
+     "docs/**/*",
      "src",
+     "!src/**/*.test.ts",
+     "!src/**/*.test-d.ts",
+     "!src/**/__snapshots__",
+     "!src/**/__fixtures__",
      "CHANGELOG.md",
      "README.md"
    ],
+   "directories": {
+     "doc": "./docs"
+   },
    "exports": {
      "./package.json": "./package.json",
      ".": {
@@ -21,9 +29,9 @@
      }
    },
    "dependencies": {
-     "@ai-sdk/openai-compatible": "2.0.17",
-     "@ai-sdk/provider": "3.0.4",
-     "@ai-sdk/provider-utils": "4.0.8"
+     "@ai-sdk/openai-compatible": "2.0.18",
+     "@ai-sdk/provider": "3.0.5",
+     "@ai-sdk/provider-utils": "4.0.9"
    },
    "devDependencies": {
      "@types/node": "20.17.24",
@@ -55,7 +63,7 @@
    "scripts": {
      "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
      "build:watch": "pnpm clean && tsup --watch",
-     "clean": "del-cli dist *.tsbuildinfo",
+     "clean": "del-cli dist docs *.tsbuildinfo",
      "lint": "eslint \"./**/*.ts*\"",
      "type-check": "tsc --build",
      "prettier-check": "prettier --check \"./**/*.ts*\"",
@@ -1,102 +0,0 @@
- import { createVercel } from './vercel-provider';
- import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';
- import { LanguageModelV3 } from '@ai-sdk/provider';
- import { loadApiKey } from '@ai-sdk/provider-utils';
- import { describe, it, expect, vi, beforeEach, Mock } from 'vitest';
-
- const OpenAICompatibleChatLanguageModelMock =
-   OpenAICompatibleChatLanguageModel as unknown as Mock;
-
- vi.mock('@ai-sdk/openai-compatible', () => ({
-   OpenAICompatibleChatLanguageModel: vi.fn(),
-   OpenAICompatibleCompletionLanguageModel: vi.fn(),
- }));
-
- vi.mock('@ai-sdk/provider-utils', async () => {
-   const actual = await vi.importActual('@ai-sdk/provider-utils');
-   return {
-     ...actual,
-     loadApiKey: vi.fn().mockReturnValue('mock-api-key'),
-     withoutTrailingSlash: vi.fn(url => url),
-   };
- });
-
- vi.mock('./vercel-image-model', () => ({
-   VercelImageModel: vi.fn(),
- }));
-
- describe('VercelProvider', () => {
-   let mockLanguageModel: LanguageModelV3;
-
-   beforeEach(() => {
-     mockLanguageModel = {
-       // Add any required methods for LanguageModelV1
-     } as LanguageModelV3;
-
-     // Reset mocks
-     vi.clearAllMocks();
-   });
-
-   describe('createVercel', () => {
-     it('should create a VercelProvider instance with default options', () => {
-       const provider = createVercel();
-       provider('model-id');
-
-       // Use the mocked version
-       const constructorCall =
-         OpenAICompatibleChatLanguageModelMock.mock.calls[0];
-       const config = constructorCall[1];
-       config.headers();
-
-       expect(loadApiKey).toHaveBeenCalledWith({
-         apiKey: undefined,
-         environmentVariableName: 'VERCEL_API_KEY',
-         description: 'Vercel',
-       });
-     });
-
-     it('should create a VercelProvider instance with custom options', () => {
-       const options = {
-         apiKey: 'custom-key',
-         baseURL: 'https://custom.url',
-         headers: { 'Custom-Header': 'value' },
-       };
-       const provider = createVercel(options);
-       provider('model-id');
-
-       const constructorCall =
-         OpenAICompatibleChatLanguageModelMock.mock.calls[0];
-       const config = constructorCall[1];
-       config.headers();
-
-       expect(loadApiKey).toHaveBeenCalledWith({
-         apiKey: 'custom-key',
-         environmentVariableName: 'VERCEL_API_KEY',
-         description: 'Vercel',
-       });
-     });
-
-     it('should return a chat model when called as a function', () => {
-       const provider = createVercel();
-       const modelId = 'foo-model-id';
-
-       const model = provider(modelId);
-       expect(model).toBeInstanceOf(OpenAICompatibleChatLanguageModel);
-     });
-   });
-
-   it('should construct a language model with correct configuration', () => {
-     const provider = createVercel();
-     const modelId = 'vercel-chat-model';
-
-     const model = provider.languageModel(modelId);
-
-     expect(model).toBeInstanceOf(OpenAICompatibleChatLanguageModel);
-     expect(OpenAICompatibleChatLanguageModelMock).toHaveBeenCalledWith(
-       modelId,
-       expect.objectContaining({
-         provider: 'vercel.chat',
-       }),
-     );
-   });
- });
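The capability table in the docs added above marks every v0 model as supporting image input and object generation. A minimal sketch of both, assuming the AI SDK's standard message content parts and `generateObject` with a Zod schema; the prompt, image URL, and schema shape are made up for illustration:

```ts
import { vercel } from '@ai-sdk/vercel';
import { generateText, generateObject } from 'ai';
import { z } from 'zod';

// Image input: send a content-part array instead of a plain string prompt.
const { text } = await generateText({
  model: vercel('v0-1.5-md'),
  messages: [
    {
      role: 'user',
      content: [
        { type: 'text', text: 'Recreate this landing page as a React component.' },
        { type: 'image', image: new URL('https://example.com/screenshot.png') }, // placeholder URL
      ],
    },
  ],
});
console.log(text);

// Object generation: constrain the model output with a schema.
const { object } = await generateObject({
  model: vercel('v0-1.5-lg'),
  schema: z.object({
    componentName: z.string(),
    dependencies: z.array(z.string()),
  }),
  prompt: 'Plan a pricing-table component for a Next.js app.',
});
console.log(object);
```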