@ai-sdk/vercel 0.0.0-70e0935a-20260114150030 → 0.0.0-98261322-20260122142521
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +64 -5
- package/dist/index.js +1 -1
- package/dist/index.mjs +1 -1
- package/docs/02-vercel.mdx +129 -0
- package/package.json +10 -5
- package/src/index.ts +4 -0
- package/src/vercel-chat-options.ts +6 -0
- package/src/vercel-provider.test.ts +102 -0
- package/src/vercel-provider.ts +107 -0
- package/src/version.ts +6 -0
package/CHANGELOG.md
CHANGED
@@ -1,13 +1,72 @@
 # @ai-sdk/vercel
 
-## 0.0.0-
+## 0.0.0-98261322-20260122142521
 
 ### Patch Changes
 
--
-
-
-
+- 080559b: chore: add docs to package dist
+
+## 2.0.17
+
+### Patch Changes
+
+- 8dc54db: chore: add src folders to package bundle
+- Updated dependencies [8dc54db]
+  - @ai-sdk/openai-compatible@2.0.17
+
+## 2.0.16
+
+### Patch Changes
+
+- Updated dependencies [78555ad]
+  - @ai-sdk/openai-compatible@2.0.16
+
+## 2.0.15
+
+### Patch Changes
+
+- Updated dependencies [7116ef3]
+  - @ai-sdk/openai-compatible@2.0.15
+
+## 2.0.14
+
+### Patch Changes
+
+- Updated dependencies [1612a57]
+  - @ai-sdk/openai-compatible@2.0.14
+
+## 2.0.13
+
+### Patch Changes
+
+- Updated dependencies [5c090e7]
+  - @ai-sdk/provider@3.0.4
+  - @ai-sdk/openai-compatible@2.0.13
+  - @ai-sdk/provider-utils@4.0.8
+
+## 2.0.12
+
+### Patch Changes
+
+- Updated dependencies [78a133a]
+  - @ai-sdk/openai-compatible@2.0.12
+
+## 2.0.11
+
+### Patch Changes
+
+- Updated dependencies [46f46e4]
+  - @ai-sdk/provider-utils@4.0.7
+  - @ai-sdk/openai-compatible@2.0.11
+
+## 2.0.10
+
+### Patch Changes
+
+- Updated dependencies [1b11dcb]
+  - @ai-sdk/provider-utils@4.0.6
+  - @ai-sdk/provider@3.0.3
+  - @ai-sdk/openai-compatible@2.0.10
 
 ## 2.0.9
 
package/dist/index.js
CHANGED
@@ -32,7 +32,7 @@ var import_openai_compatible = require("@ai-sdk/openai-compatible");
 var import_provider_utils = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "0.0.0-
+var VERSION = true ? "0.0.0-98261322-20260122142521" : "0.0.0-test";
 
 // src/vercel-provider.ts
 function createVercel(options = {}) {
package/dist/index.mjs
CHANGED
@@ -10,7 +10,7 @@ import {
 } from "@ai-sdk/provider-utils";
 
 // src/version.ts
-var VERSION = true ? "0.0.0-
+var VERSION = true ? "0.0.0-98261322-20260122142521" : "0.0.0-test";
 
 // src/vercel-provider.ts
 function createVercel(options = {}) {
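The only change in both bundles is the embedded version string. Based on `src/vercel-provider.ts` further below, this constant ends up in the request headers as a user-agent suffix; a minimal sketch of that flow, assuming the exported `VERSION` and the `withUserAgentSuffix` call shape shown in this diff:

```ts
import { withUserAgentSuffix } from '@ai-sdk/provider-utils';
import { VERSION } from '@ai-sdk/vercel';

// Mirrors the getHeaders() closure in src/vercel-provider.ts:
// request headers receive a suffix of the form `ai-sdk/vercel/<VERSION>`.
const headers = withUserAgentSuffix(
  { Authorization: 'Bearer <your-api-key>' }, // placeholder token, not a real key
  `ai-sdk/vercel/${VERSION}`,
);

console.log(headers);
```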
package/docs/02-vercel.mdx
ADDED
@@ -0,0 +1,129 @@
+---
+title: Vercel
+description: Learn how to use Vercel's v0 models with the AI SDK.
+---
+
+# Vercel Provider
+
+The [Vercel](https://vercel.com) provider gives you access to the [v0 API](https://v0.app/docs/api/model), designed for building modern web applications. The v0 models support text and image inputs and provide fast streaming responses.
+
+You can create your Vercel API key at [v0.dev](https://v0.dev/chat/settings/keys).
+
+<Note>
+  The v0 API is currently in beta and requires a Premium or Team plan with
+  usage-based billing enabled. For details, visit the [pricing
+  page](https://v0.dev/pricing). To request a higher limit, contact Vercel at
+  support@v0.dev.
+</Note>
+
+## Features
+
+- **Framework aware completions**: Evaluated on modern stacks like Next.js and Vercel
+- **Auto-fix**: Identifies and corrects common coding issues during generation
+- **Quick edit**: Streams inline edits as they're available
+- **Multimodal**: Supports both text and image inputs
+
+## Setup
+
+The Vercel provider is available via the `@ai-sdk/vercel` module. You can install it with:
+
+<Tabs items={['pnpm', 'npm', 'yarn', 'bun']}>
+  <Tab>
+    <Snippet text="pnpm add @ai-sdk/vercel" dark />
+  </Tab>
+  <Tab>
+    <Snippet text="npm install @ai-sdk/vercel" dark />
+  </Tab>
+  <Tab>
+    <Snippet text="yarn add @ai-sdk/vercel" dark />
+  </Tab>
+
+  <Tab>
+    <Snippet text="bun add @ai-sdk/vercel" dark />
+  </Tab>
+</Tabs>
+
+## Provider Instance
+
+You can import the default provider instance `vercel` from `@ai-sdk/vercel`:
+
+```ts
+import { vercel } from '@ai-sdk/vercel';
+```
+
+If you need a customized setup, you can import `createVercel` from `@ai-sdk/vercel` and create a provider instance with your settings:
+
+```ts
+import { createVercel } from '@ai-sdk/vercel';
+
+const vercel = createVercel({
+  apiKey: process.env.VERCEL_API_KEY ?? '',
+});
+```
+
+You can use the following optional settings to customize the Vercel provider instance:
+
+- **baseURL** _string_
+
+  Use a different URL prefix for API calls. The default prefix is `https://api.v0.dev/v1`.
+
+- **apiKey** _string_
+
+  API key that is being sent using the `Authorization` header. It defaults to
+  the `VERCEL_API_KEY` environment variable.
+
+- **headers** _Record<string,string>_
+
+  Custom headers to include in the requests.
+
+- **fetch** _(input: RequestInfo, init?: RequestInit) => Promise<Response>_
+
+  Custom [fetch](https://developer.mozilla.org/en-US/docs/Web/API/fetch) implementation.
+  Defaults to the global `fetch` function.
+  You can use it as a middleware to intercept requests,
+  or to provide a custom fetch implementation for e.g. testing.
+
+## Language Models
+
+You can create language models using a provider instance. The first argument is the model ID, for example:
+
+```ts
+import { vercel } from '@ai-sdk/vercel';
+import { generateText } from 'ai';
+
+const { text } = await generateText({
+  model: vercel('v0-1.0-md'),
+  prompt: 'Create a Next.js AI chatbot',
+});
+```
+
+Vercel language models can also be used in the `streamText` function (see [AI SDK Core](/docs/ai-sdk-core)).
+
+## Models
+
+### v0-1.5-md
+
+The `v0-1.5-md` model is for everyday tasks and UI generation.
+
+### v0-1.5-lg
+
+The `v0-1.5-lg` model is for advanced thinking or reasoning.
+
+### v0-1.0-md (legacy)
+
+The `v0-1.0-md` model is the legacy model served by the v0 API.
+
+All v0 models have the following capabilities:
+
+- Supports text and image inputs (multimodal)
+- Supports function/tool calls
+- Streaming responses with low latency
+- Optimized for frontend and full-stack web development
+
+## Model Capabilities
+
+| Model       | Image Input         | Object Generation   | Tool Usage          | Tool Streaming      |
+| ----------- | ------------------- | ------------------- | ------------------- | ------------------- |
+| `v0-1.5-md` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
+| `v0-1.5-lg` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
+| `v0-1.0-md` | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> | <Check size={18} /> |
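The added docs mention `streamText` but only show `generateText`; a minimal streaming sketch, assuming the standard `streamText` API from the `ai` package and the `v0-1.5-md` model listed above:

```ts
import { vercel } from '@ai-sdk/vercel';
import { streamText } from 'ai';

const result = streamText({
  model: vercel('v0-1.5-md'),
  prompt: 'Create a Next.js AI chatbot',
});

// Print the response as it streams in.
for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```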
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ai-sdk/vercel",
-  "version": "0.0.0-
+  "version": "0.0.0-98261322-20260122142521",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -8,9 +8,14 @@
   "types": "./dist/index.d.ts",
   "files": [
     "dist/**/*",
+    "docs/**/*",
+    "src",
     "CHANGELOG.md",
     "README.md"
   ],
+  "directories": {
+    "doc": "./docs"
+  },
   "exports": {
     "./package.json": "./package.json",
     ".": {
@@ -20,9 +25,9 @@
     }
   },
   "dependencies": {
-    "@ai-sdk/openai-compatible": "
-    "@ai-sdk/provider": "
-    "@ai-sdk/provider-utils": "
+    "@ai-sdk/openai-compatible": "2.0.17",
+    "@ai-sdk/provider": "3.0.4",
+    "@ai-sdk/provider-utils": "4.0.8"
   },
   "devDependencies": {
     "@types/node": "20.17.24",
@@ -54,7 +59,7 @@
   "scripts": {
     "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
     "build:watch": "pnpm clean && tsup --watch",
-    "clean": "del-cli dist *.tsbuildinfo",
+    "clean": "del-cli dist docs *.tsbuildinfo",
     "lint": "eslint \"./**/*.ts*\"",
     "type-check": "tsc --build",
     "prettier-check": "prettier --check \"./**/*.ts*\"",
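With `docs/**/*` now in `files` (matching the changelog entry "add docs to package dist"), the MDX documentation ships inside the published tarball. A small sketch of how a consumer could locate it after install, assuming Node.js resolution of the package root via its exported `package.json`:

```ts
import { createRequire } from 'node:module';
import { readFileSync } from 'node:fs';
import { dirname, join } from 'node:path';

const require = createRequire(import.meta.url);

// "./package.json" is an explicit export of @ai-sdk/vercel, so it can be resolved.
const packageRoot = dirname(require.resolve('@ai-sdk/vercel/package.json'));

// The docs folder is published alongside dist/ per the "files" field above.
const providerDocs = readFileSync(join(packageRoot, 'docs/02-vercel.mdx'), 'utf8');

console.log(providerDocs.slice(0, 200));
```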
package/src/index.ts
ADDED
@@ -0,0 +1,4 @@
+export { createVercel, vercel } from './vercel-provider';
+export type { VercelProvider, VercelProviderSettings } from './vercel-provider';
+export type { OpenAICompatibleErrorData as VercelErrorData } from '@ai-sdk/openai-compatible';
+export { VERSION } from './version';
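Everything the entry point above re-exports is consumed from the package root; a short sketch of the public surface, assuming the published type exports match this source file (the API key value is purely illustrative):

```ts
import { vercel, createVercel, VERSION } from '@ai-sdk/vercel';
import type { VercelProvider, VercelProviderSettings } from '@ai-sdk/vercel';

// The default instance and the factory produce the same provider shape.
const settings: VercelProviderSettings = { apiKey: 'illustrative-key' };
const custom: VercelProvider = createVercel(settings);

console.log(VERSION, typeof vercel, typeof custom);
```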
package/src/vercel-provider.test.ts
ADDED
@@ -0,0 +1,102 @@
+import { createVercel } from './vercel-provider';
+import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';
+import { LanguageModelV3 } from '@ai-sdk/provider';
+import { loadApiKey } from '@ai-sdk/provider-utils';
+import { describe, it, expect, vi, beforeEach, Mock } from 'vitest';
+
+const OpenAICompatibleChatLanguageModelMock =
+  OpenAICompatibleChatLanguageModel as unknown as Mock;
+
+vi.mock('@ai-sdk/openai-compatible', () => ({
+  OpenAICompatibleChatLanguageModel: vi.fn(),
+  OpenAICompatibleCompletionLanguageModel: vi.fn(),
+}));
+
+vi.mock('@ai-sdk/provider-utils', async () => {
+  const actual = await vi.importActual('@ai-sdk/provider-utils');
+  return {
+    ...actual,
+    loadApiKey: vi.fn().mockReturnValue('mock-api-key'),
+    withoutTrailingSlash: vi.fn(url => url),
+  };
+});
+
+vi.mock('./vercel-image-model', () => ({
+  VercelImageModel: vi.fn(),
+}));
+
+describe('VercelProvider', () => {
+  let mockLanguageModel: LanguageModelV3;
+
+  beforeEach(() => {
+    mockLanguageModel = {
+      // Add any required methods for LanguageModelV1
+    } as LanguageModelV3;
+
+    // Reset mocks
+    vi.clearAllMocks();
+  });
+
+  describe('createVercel', () => {
+    it('should create a VercelProvider instance with default options', () => {
+      const provider = createVercel();
+      provider('model-id');
+
+      // Use the mocked version
+      const constructorCall =
+        OpenAICompatibleChatLanguageModelMock.mock.calls[0];
+      const config = constructorCall[1];
+      config.headers();
+
+      expect(loadApiKey).toHaveBeenCalledWith({
+        apiKey: undefined,
+        environmentVariableName: 'VERCEL_API_KEY',
+        description: 'Vercel',
+      });
+    });
+
+    it('should create a VercelProvider instance with custom options', () => {
+      const options = {
+        apiKey: 'custom-key',
+        baseURL: 'https://custom.url',
+        headers: { 'Custom-Header': 'value' },
+      };
+      const provider = createVercel(options);
+      provider('model-id');
+
+      const constructorCall =
+        OpenAICompatibleChatLanguageModelMock.mock.calls[0];
+      const config = constructorCall[1];
+      config.headers();
+
+      expect(loadApiKey).toHaveBeenCalledWith({
+        apiKey: 'custom-key',
+        environmentVariableName: 'VERCEL_API_KEY',
+        description: 'Vercel',
+      });
+    });
+
+    it('should return a chat model when called as a function', () => {
+      const provider = createVercel();
+      const modelId = 'foo-model-id';
+
+      const model = provider(modelId);
+      expect(model).toBeInstanceOf(OpenAICompatibleChatLanguageModel);
+    });
+  });
+
+  it('should construct a language model with correct configuration', () => {
+    const provider = createVercel();
+    const modelId = 'vercel-chat-model';
+
+    const model = provider.languageModel(modelId);
+
+    expect(model).toBeInstanceOf(OpenAICompatibleChatLanguageModel);
+    expect(OpenAICompatibleChatLanguageModelMock).toHaveBeenCalledWith(
+      modelId,
+      expect.objectContaining({
+        provider: 'vercel.chat',
+      }),
+    );
+  });
+});
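The suite above exercises the API-key path and the provider string. A further check that could be layered on under the same mocks (hypothetical, not part of this package) is the URL builder that `getCommonModelConfig` wires in, which joins the default base URL with whatever path the chat model requests:

```ts
// Hypothetical extra case, reusing the mocks defined in vercel-provider.test.ts.
it('should join the default baseURL with the request path', () => {
  const provider = createVercel();
  provider('model-id');

  const config = OpenAICompatibleChatLanguageModelMock.mock.calls[0][1];

  // The path argument is normally supplied by OpenAICompatibleChatLanguageModel;
  // '/chat/completions' is only an illustrative value here.
  expect(config.url({ path: '/chat/completions' })).toBe(
    'https://api.v0.dev/v1/chat/completions',
  );
});
```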
package/src/vercel-provider.ts
ADDED
@@ -0,0 +1,107 @@
+import {
+  LanguageModelV3,
+  NoSuchModelError,
+  ProviderV3,
+} from '@ai-sdk/provider';
+import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';
+import {
+  FetchFunction,
+  loadApiKey,
+  withoutTrailingSlash,
+  withUserAgentSuffix,
+} from '@ai-sdk/provider-utils';
+import { VercelChatModelId } from './vercel-chat-options';
+import { VERSION } from './version';
+
+export interface VercelProviderSettings {
+  /**
+Vercel API key.
+*/
+  apiKey?: string;
+  /**
+Base URL for the API calls.
+*/
+  baseURL?: string;
+  /**
+Custom headers to include in the requests.
+*/
+  headers?: Record<string, string>;
+  /**
+Custom fetch implementation. You can use it as a middleware to intercept requests,
+or to provide a custom fetch implementation for e.g. testing.
+*/
+  fetch?: FetchFunction;
+}
+
+export interface VercelProvider extends ProviderV3 {
+  /**
+Creates a model for text generation.
+*/
+  (modelId: VercelChatModelId): LanguageModelV3;
+
+  /**
+Creates a language model for text generation.
+*/
+  languageModel(modelId: VercelChatModelId): LanguageModelV3;
+
+  /**
+   * @deprecated Use `embeddingModel` instead.
+   */
+  textEmbeddingModel(modelId: string): never;
+}
+
+export function createVercel(
+  options: VercelProviderSettings = {},
+): VercelProvider {
+  const baseURL = withoutTrailingSlash(
+    options.baseURL ?? 'https://api.v0.dev/v1',
+  );
+  const getHeaders = () =>
+    withUserAgentSuffix(
+      {
+        Authorization: `Bearer ${loadApiKey({
+          apiKey: options.apiKey,
+          environmentVariableName: 'VERCEL_API_KEY',
+          description: 'Vercel',
+        })}`,
+        ...options.headers,
+      },
+      `ai-sdk/vercel/${VERSION}`,
+    );
+
+  interface CommonModelConfig {
+    provider: string;
+    url: ({ path }: { path: string }) => string;
+    headers: () => Record<string, string>;
+    fetch?: FetchFunction;
+  }
+
+  const getCommonModelConfig = (modelType: string): CommonModelConfig => ({
+    provider: `vercel.${modelType}`,
+    url: ({ path }) => `${baseURL}${path}`,
+    headers: getHeaders,
+    fetch: options.fetch,
+  });
+
+  const createChatModel = (modelId: VercelChatModelId) => {
+    return new OpenAICompatibleChatLanguageModel(modelId, {
+      ...getCommonModelConfig('chat'),
+    });
+  };
+
+  const provider = (modelId: VercelChatModelId) => createChatModel(modelId);
+
+  provider.specificationVersion = 'v3' as const;
+  provider.languageModel = createChatModel;
+  provider.embeddingModel = (modelId: string) => {
+    throw new NoSuchModelError({ modelId, modelType: 'embeddingModel' });
+  };
+  provider.textEmbeddingModel = provider.embeddingModel;
+  provider.imageModel = (modelId: string) => {
+    throw new NoSuchModelError({ modelId, modelType: 'imageModel' });
+  };
+
+  return provider;
+}
+
+export const vercel = createVercel();
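For completeness, a short usage sketch of the factory above, wiring in the optional `fetch` middleware the settings interface documents (the logging wrapper and the extra header are illustrative, not part of the package):

```ts
import { createVercel } from '@ai-sdk/vercel';
import { generateText } from 'ai';

const vercel = createVercel({
  // Falls back to the VERCEL_API_KEY environment variable when omitted.
  apiKey: process.env.VERCEL_API_KEY,
  // Illustrative custom header, merged into every request by getHeaders().
  headers: { 'x-request-source': 'docs-example' },
  // Illustrative middleware: log each request before delegating to global fetch.
  fetch: async (input, init) => {
    console.log('v0 API request:', String(input));
    return fetch(input, init);
  },
});

const { text } = await generateText({
  model: vercel('v0-1.5-md'),
  prompt: 'Create a Next.js AI chatbot',
});

console.log(text);
```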