@ai-sdk/cohere 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +13 -0
- package/README.md +35 -0
- package/dist/index.d.mts +65 -0
- package/dist/index.d.ts +65 -0
- package/dist/index.js +392 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +376 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +62 -0
package/LICENSE
ADDED
@@ -0,0 +1,13 @@
Copyright 2023 Vercel, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
package/README.md
ADDED
@@ -0,0 +1,35 @@
# Vercel AI SDK - Cohere Provider

The **[Cohere provider](https://sdk.vercel.ai/providers/ai-sdk-providers/cohere)** for the [Vercel AI SDK](https://sdk.vercel.ai/docs) contains language model support for the Cohere API.

## Setup

The Cohere provider is available in the `@ai-sdk/cohere` module. You can install it with

```bash
npm i @ai-sdk/cohere
```

## Provider Instance

You can import the default provider instance `cohere` from `@ai-sdk/cohere`:

```ts
import { cohere } from '@ai-sdk/cohere';
```

## Example

```ts
import { cohere } from '@ai-sdk/cohere';
import { generateText } from 'ai';

const { text } = await generateText({
  model: cohere('command-r-plus'),
  prompt: 'Write a vegetarian lasagna recipe for 4 people.',
});
```

## Documentation

Please check out the **[Cohere provider](https://sdk.vercel.ai/providers/ai-sdk-providers/cohere)** for more information.
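The README only covers the default `cohere` instance. A minimal sketch of a customized instance using the optional fields declared in `CohereProviderSettings` (see the type definitions below); the proxy URL, environment variable name, and header in this sketch are illustrative values, not package defaults:

```ts
import { createCohere } from '@ai-sdk/cohere';
import { generateText } from 'ai';

// All settings are optional; when apiKey is omitted the provider reads COHERE_API_KEY.
const cohereViaProxy = createCohere({
  baseURL: 'https://my-proxy.example.com/v1', // hypothetical proxy in front of api.cohere.com
  apiKey: process.env.MY_COHERE_KEY,          // hypothetical environment variable
  headers: { 'X-Request-Source': 'docs-example' },
});

const { text } = await generateText({
  model: cohereViaProxy('command-r'),
  prompt: 'Write a one-sentence summary of the Apache-2.0 license.',
});

console.log(text);
```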
package/dist/index.d.mts
ADDED
@@ -0,0 +1,65 @@
import { LanguageModelV1 } from '@ai-sdk/provider';

type CohereChatModelId = 'command-r-plus' | 'command-r' | 'command' | 'command-light' | (string & {});
interface CohereChatSettings {
}

type CohereChatConfig = {
    provider: string;
    baseURL: string;
    headers: () => Record<string, string | undefined>;
    generateId: () => string;
    fetch?: typeof fetch;
};
declare class CohereChatLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    readonly defaultObjectGenerationMode: undefined;
    readonly modelId: CohereChatModelId;
    readonly settings: CohereChatSettings;
    private readonly config;
    constructor(modelId: CohereChatModelId, settings: CohereChatSettings, config: CohereChatConfig);
    get provider(): string;
    private getArgs;
    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
}

interface CohereProvider {
    (modelId: CohereChatModelId, settings?: CohereChatSettings): CohereChatLanguageModel;
    /**
    Creates a model for text generation.
    */
    languageModel(modelId: CohereChatModelId, settings?: CohereChatSettings): CohereChatLanguageModel;
}
interface CohereProviderSettings {
    /**
    Use a different URL prefix for API calls, e.g. to use proxy servers.
    The default prefix is `https://api.cohere.com/v1`.
    */
    baseURL?: string;
    /**
    API key that is being send using the `Authorization` header.
    It defaults to the `MISTRAL_API_KEY` environment variable.
    */
    apiKey?: string;
    /**
    Custom headers to include in the requests.
    */
    headers?: Record<string, string>;
    /**
    Custom fetch implementation. You can use it as a middleware to intercept requests,
    or to provide a custom fetch implementation for e.g. testing.
    */
    fetch?: typeof fetch;
    generateId?: () => string;
}
/**
Create a Cohere AI provider instance.
 */
declare function createCohere(options?: CohereProviderSettings): CohereProvider;
/**
Default Cohere provider instance.
 */
declare const cohere: CohereProvider;

export { type CohereProvider, type CohereProviderSettings, cohere, createCohere };
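The class declared above also implements `doStream`, which the AI SDK drives for streaming output. A minimal sketch, assuming the `streamText` helper exported by the `ai` package at this SDK generation:

```ts
import { cohere } from '@ai-sdk/cohere';
import { streamText } from 'ai';

// streamText consumes the provider's doStream and exposes the text-delta parts as a stream.
const result = await streamText({
  model: cohere('command-r'),
  prompt: 'List three facts about lasagna.',
});

for await (const delta of result.textStream) {
  process.stdout.write(delta);
}
```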
package/dist/index.d.ts
ADDED
@@ -0,0 +1,65 @@
import { LanguageModelV1 } from '@ai-sdk/provider';

type CohereChatModelId = 'command-r-plus' | 'command-r' | 'command' | 'command-light' | (string & {});
interface CohereChatSettings {
}

type CohereChatConfig = {
    provider: string;
    baseURL: string;
    headers: () => Record<string, string | undefined>;
    generateId: () => string;
    fetch?: typeof fetch;
};
declare class CohereChatLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    readonly defaultObjectGenerationMode: undefined;
    readonly modelId: CohereChatModelId;
    readonly settings: CohereChatSettings;
    private readonly config;
    constructor(modelId: CohereChatModelId, settings: CohereChatSettings, config: CohereChatConfig);
    get provider(): string;
    private getArgs;
    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
}

interface CohereProvider {
    (modelId: CohereChatModelId, settings?: CohereChatSettings): CohereChatLanguageModel;
    /**
    Creates a model for text generation.
    */
    languageModel(modelId: CohereChatModelId, settings?: CohereChatSettings): CohereChatLanguageModel;
}
interface CohereProviderSettings {
    /**
    Use a different URL prefix for API calls, e.g. to use proxy servers.
    The default prefix is `https://api.cohere.com/v1`.
    */
    baseURL?: string;
    /**
    API key that is being send using the `Authorization` header.
    It defaults to the `MISTRAL_API_KEY` environment variable.
    */
    apiKey?: string;
    /**
    Custom headers to include in the requests.
    */
    headers?: Record<string, string>;
    /**
    Custom fetch implementation. You can use it as a middleware to intercept requests,
    or to provide a custom fetch implementation for e.g. testing.
    */
    fetch?: typeof fetch;
    generateId?: () => string;
}
/**
Create a Cohere AI provider instance.
 */
declare function createCohere(options?: CohereProviderSettings): CohereProvider;
/**
Default Cohere provider instance.
 */
declare const cohere: CohereProvider;

export { type CohereProvider, type CohereProviderSettings, cohere, createCohere };
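As the `CohereProvider` interface above declares, the provider object is both callable and exposes an equivalent `languageModel` method. A short sketch:

```ts
import { cohere } from '@ai-sdk/cohere';

// Both forms construct the same CohereChatLanguageModel; no request is made until generation.
const viaCall = cohere('command-r-plus');
const viaMethod = cohere.languageModel('command-r-plus');

console.log(viaCall.modelId, viaCall.specificationVersion); // "command-r-plus", "v1"
console.log(viaMethod.provider);                            // provider string from the internal config
```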
package/dist/index.js
ADDED
@@ -0,0 +1,392 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var src_exports = {};
__export(src_exports, {
  cohere: () => cohere,
  createCohere: () => createCohere
});
module.exports = __toCommonJS(src_exports);

// src/cohere-provider.ts
var import_provider_utils3 = require("@ai-sdk/provider-utils");

// src/cohere-chat-language-model.ts
var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils2 = require("@ai-sdk/provider-utils");
var import_zod2 = require("zod");

// src/cohere-error.ts
var import_provider_utils = require("@ai-sdk/provider-utils");
var import_zod = require("zod");
var cohereErrorDataSchema = import_zod.z.object({
  message: import_zod.z.string()
});
var cohereFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: cohereErrorDataSchema,
  errorToMessage: (data) => data.message
});

// src/convert-to-cohere-chat-prompt.ts
var import_provider = require("@ai-sdk/provider");
function convertToCohereChatPrompt(prompt) {
  const messages = [];
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        messages.push({ role: "SYSTEM", message: content });
        break;
      }
      case "user": {
        messages.push({
          role: "USER",
          message: content.map((part) => {
            switch (part.type) {
              case "text": {
                return part.text;
              }
              case "image": {
                throw new import_provider.UnsupportedFunctionalityError({
                  functionality: "image-part"
                });
              }
            }
          }).join("")
        });
        break;
      }
      case "assistant": {
        let text = "";
        const toolCalls = [];
        for (const part of content) {
          switch (part.type) {
            case "text": {
              text += part.text;
              break;
            }
            case "tool-call": {
              throw new import_provider.UnsupportedFunctionalityError({
                functionality: "tool-call"
              });
            }
            default: {
              const _exhaustiveCheck = part;
              throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
            }
          }
        }
        messages.push({
          role: "CHATBOT",
          message: text,
          tool_calls: toolCalls.length > 0 ? toolCalls : void 0
        });
        break;
      }
      case "tool": {
        throw new import_provider.UnsupportedFunctionalityError({
          functionality: "tool role"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return messages;
}

// src/map-cohere-finish-reason.ts
function mapCohereFinishReason(finishReason) {
  switch (finishReason) {
    case "COMPLETE":
    case "STOP_SEQUENCE":
      return "stop";
    case "MAX_TOKENS":
      return "length";
    case "ERROR":
    case "ERROR_LIMIT":
      return "error";
    case "ERROR_TOXIC":
      return "content-filter";
    case "USER_CANCEL":
      return "other";
    default:
      return "unknown";
  }
}

// src/cohere-chat-language-model.ts
var CohereChatLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    const type = mode.type;
    const chatPrompt = convertToCohereChatPrompt(prompt);
    const [lastMessage, ...history] = chatPrompt;
    const args = {
      // model id:
      model: this.modelId,
      // model specific settings:
      // none
      // standardized settings:
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      max_tokens: maxTokens,
      temperature,
      p: topP,
      seed,
      // messages:
      chat_history: history,
      message: lastMessage.role === "USER" ? lastMessage.message : void 0
    };
    switch (type) {
      case "regular": {
        return args;
      }
      case "object-json": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  async doGenerate(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
      url: `${this.config.baseURL}/chat`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: cohereFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
        cohereChatResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { chat_history, message, ...rawSettings } = args;
    return {
      text: response.text,
      finishReason: mapCohereFinishReason(response.finish_reason),
      usage: {
        promptTokens: response.meta.tokens.input_tokens,
        completionTokens: response.meta.tokens.output_tokens
      },
      rawCall: {
        rawPrompt: {
          chat_history,
          message
        },
        rawSettings
      },
      rawResponse: { headers: responseHeaders },
      warnings: void 0
    };
  }
  async doStream(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
      url: `${this.config.baseURL}/chat`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: cohereFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils2.createJsonStreamResponseHandler)(
        cohereChatChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { chat_history, message, ...rawSettings } = args;
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            const type = value.event_type;
            switch (type) {
              case "text-generation": {
                controller.enqueue({
                  type: "text-delta",
                  textDelta: value.text
                });
                return;
              }
              case "stream-end": {
                finishReason = mapCohereFinishReason(value.finish_reason);
                const tokens = value.response.meta.tokens;
                usage = {
                  promptTokens: tokens.input_tokens,
                  completionTokens: tokens.output_tokens
                };
              }
              default: {
                return;
              }
            }
          },
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              usage
            });
          }
        })
      ),
      rawCall: {
        rawPrompt: {
          chat_history,
          message
        },
        rawSettings
      },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
};
var cohereChatResponseSchema = import_zod2.z.object({
  text: import_zod2.z.string(),
  finish_reason: import_zod2.z.string(),
  meta: import_zod2.z.object({
    tokens: import_zod2.z.object({
      input_tokens: import_zod2.z.number(),
      output_tokens: import_zod2.z.number()
    })
  })
});
var cohereChatChunkSchema = import_zod2.z.discriminatedUnion("event_type", [
  import_zod2.z.object({
    event_type: import_zod2.z.literal("stream-start")
  }),
  import_zod2.z.object({
    event_type: import_zod2.z.literal("search-queries-generation")
  }),
  import_zod2.z.object({
    event_type: import_zod2.z.literal("search-results")
  }),
  import_zod2.z.object({
    event_type: import_zod2.z.literal("text-generation"),
    text: import_zod2.z.string()
  }),
  import_zod2.z.object({
    event_type: import_zod2.z.literal("citation-generation")
  }),
  import_zod2.z.object({
    event_type: import_zod2.z.literal("tool-calls-generation")
  }),
  import_zod2.z.object({
    event_type: import_zod2.z.literal("stream-end"),
    finish_reason: import_zod2.z.string(),
    response: import_zod2.z.object({
      meta: import_zod2.z.object({
        tokens: import_zod2.z.object({
          input_tokens: import_zod2.z.number(),
          output_tokens: import_zod2.z.number()
        })
      })
    })
  })
]);

// src/cohere-provider.ts
function createCohere(options = {}) {
  var _a;
  const baseURL = (_a = (0, import_provider_utils3.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.cohere.com/v1";
  const getHeaders = () => ({
    Authorization: `Bearer ${(0, import_provider_utils3.loadApiKey)({
      apiKey: options.apiKey,
      environmentVariableName: "COHERE_API_KEY",
      description: "Cohere"
    })}`,
    ...options.headers
  });
  const createChatModel = (modelId, settings = {}) => {
    var _a2;
    return new CohereChatLanguageModel(modelId, settings, {
      provider: "mistral.chat",
      baseURL,
      headers: getHeaders,
      generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils3.generateId,
      fetch: options.fetch
    });
  };
  const provider = function(modelId, settings) {
    if (new.target) {
      throw new Error(
        "The Cohere model function cannot be called with the new keyword."
      );
    }
    return createChatModel(modelId, settings);
  };
  provider.languageModel = createChatModel;
  return provider;
}
var cohere = createCohere();
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  cohere,
  createCohere
});
//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/cohere-provider.ts","../src/cohere-chat-language-model.ts","../src/cohere-error.ts","../src/convert-to-cohere-chat-prompt.ts","../src/map-cohere-finish-reason.ts"],"sourcesContent":["export * from './cohere-provider';\n","import {\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { CohereChatModelId, CohereChatSettings } from './cohere-chat-settings';\nimport { CohereChatLanguageModel } from './cohere-chat-language-model';\n\nexport interface CohereProvider {\n (\n modelId: CohereChatModelId,\n settings?: CohereChatSettings,\n ): CohereChatLanguageModel;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: CohereChatModelId,\n settings?: CohereChatSettings,\n ): CohereChatLanguageModel;\n}\n\nexport interface CohereProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.cohere.com/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: typeof fetch;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Cohere AI provider instance.\n */\nexport function createCohere(\n options: CohereProviderSettings = {},\n): CohereProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.cohere.com/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'COHERE_API_KEY',\n description: 'Cohere',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: CohereChatModelId,\n settings: CohereChatSettings = {},\n ) =>\n new CohereChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? 
generateId,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: CohereChatModelId,\n settings?: CohereChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Cohere model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n\n return provider as CohereProvider;\n}\n\n/**\nDefault Cohere provider instance.\n */\nexport const cohere = createCohere();\n","import {\n LanguageModelV1,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createJsonResponseHandler,\n createJsonStreamResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { CohereChatModelId, CohereChatSettings } from './cohere-chat-settings';\nimport { cohereFailedResponseHandler } from './cohere-error';\nimport { convertToCohereChatPrompt } from './convert-to-cohere-chat-prompt';\nimport { mapCohereFinishReason } from './map-cohere-finish-reason';\n\ntype CohereChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n fetch?: typeof fetch;\n};\n\nexport class CohereChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: CohereChatModelId;\n readonly settings: CohereChatSettings;\n\n private readonly config: CohereChatConfig;\n\n constructor(\n modelId: CohereChatModelId,\n settings: CohereChatSettings,\n config: CohereChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n // Cohere distinguishes between the current message and the chat history\n const chatPrompt = convertToCohereChatPrompt(prompt);\n const [lastMessage, ...history] = chatPrompt;\n\n const args = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // none\n\n // standardized settings:\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n max_tokens: maxTokens,\n temperature,\n p: topP,\n seed,\n\n // messages:\n chat_history: history,\n message: lastMessage.role === 'USER' ? 
lastMessage.message : undefined,\n };\n\n switch (type) {\n case 'regular': {\n return args;\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: cohereFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n cohereChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { chat_history, message, ...rawSettings } = args;\n\n return {\n text: response.text,\n finishReason: mapCohereFinishReason(response.finish_reason),\n usage: {\n promptTokens: response.meta.tokens.input_tokens,\n completionTokens: response.meta.tokens.output_tokens,\n },\n rawCall: {\n rawPrompt: {\n chat_history,\n message,\n },\n rawSettings,\n },\n rawResponse: { headers: responseHeaders },\n warnings: undefined,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: cohereFailedResponseHandler,\n successfulResponseHandler: createJsonStreamResponseHandler(\n cohereChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { chat_history, message, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof cohereChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n const type = value.event_type;\n\n switch (type) {\n case 'text-generation': {\n controller.enqueue({\n type: 'text-delta',\n textDelta: value.text,\n });\n return;\n }\n\n case 'stream-end': {\n finishReason = mapCohereFinishReason(value.finish_reason);\n const tokens = value.response.meta.tokens;\n\n usage = {\n promptTokens: tokens.input_tokens,\n completionTokens: tokens.output_tokens,\n };\n }\n\n default: {\n return;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: {\n rawPrompt: {\n chat_history,\n message,\n },\n rawSettings,\n },\n rawResponse: { headers: responseHeaders },\n warnings: [],\n };\n }\n}\n\n// limited version of the schema, focussed on 
what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst cohereChatResponseSchema = z.object({\n text: z.string(),\n finish_reason: z.string(),\n meta: z.object({\n tokens: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst cohereChatChunkSchema = z.discriminatedUnion('event_type', [\n z.object({\n event_type: z.literal('stream-start'),\n }),\n z.object({\n event_type: z.literal('search-queries-generation'),\n }),\n z.object({\n event_type: z.literal('search-results'),\n }),\n z.object({\n event_type: z.literal('text-generation'),\n text: z.string(),\n }),\n z.object({\n event_type: z.literal('citation-generation'),\n }),\n z.object({\n event_type: z.literal('tool-calls-generation'),\n }),\n z.object({\n event_type: z.literal('stream-end'),\n finish_reason: z.string(),\n response: z.object({\n meta: z.object({\n tokens: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n }),\n }),\n }),\n]);\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst cohereErrorDataSchema = z.object({\n message: z.string(),\n});\n\nexport type CohereErrorData = z.infer<typeof cohereErrorDataSchema>;\n\nexport const cohereFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: cohereErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { CohereChatPrompt } from './cohere-chat-prompt';\n\nexport function convertToCohereChatPrompt(\n prompt: LanguageModelV1Prompt,\n): CohereChatPrompt {\n const messages: CohereChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'SYSTEM', message: content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'USER',\n message: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'image-part',\n });\n }\n }\n })\n .join(''),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n name: string;\n parameters: object;\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call',\n });\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'CHATBOT',\n message: text,\n tool_calls: toolCalls.length > 0 ? 
toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool role',\n });\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapCohereFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'COMPLETE':\n case 'STOP_SEQUENCE':\n return 'stop';\n\n case 'MAX_TOKENS':\n return 'length';\n\n case 'ERROR':\n case 'ERROR_LIMIT':\n return 'error';\n\n case 'ERROR_TOXIC':\n return 'content-filter';\n\n case 'USER_CANCEL':\n return 'other';\n\n default:\n return 'unknown';\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAIO;;;ACJP,IAAAC,mBAKO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACZlB,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,SAAS,aAAE,OAAO;AACpB,CAAC;AAIM,IAAM,kCAA8B,sDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;ACZD,sBAGO;AAGA,SAAS,0BACd,QACkB;AAClB,QAAM,WAA6B,CAAC;AAEpC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,CAAC;AAClD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAGD,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,8CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,IAAI,8CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACnFO,SAAS,sBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE,aAAO;AAAA,EACX;AACF;;;AHAO,IAAM,0BAAN,MAAyD;AAAA,EAS9D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAGlB,UAAM,aAAa,0BAA0B,MAAM;AACnD,UAAM,CAAC,aAAa,GAAG,OAAO,IAAI;AAElC,UAAM,OAAO;AAAA;AAAA,MAEX,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,MAMZ,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,YAAY;AAAA,MACZ;AAAA,MACA,GAAG;AAAA,MACH;AAAA;AAAA,MAGA,cAAc;AAAA,MACd,SAAS,YAAY,SAAS,SAAS,YAAY,UAAU;AAAA,IAC/D;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,eAAO;AAAA,MACT;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAA
M,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,cAAc,SAAS,GAAG,YAAY,IAAI;AAElD,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,cAAc,sBAAsB,SAAS,aAAa;AAAA,MAC1D,OAAO;AAAA,QACL,cAAc,SAAS,KAAK,OAAO;AAAA,QACnC,kBAAkB,SAAS,KAAK,OAAO;AAAA,MACzC;AAAA,MACA,SAAS;AAAA,QACP,WAAW;AAAA,UACT;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,MACA,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,cAAc,SAAS,GAAG,YAAY,IAAI;AAElD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AACpB,kBAAM,OAAO,MAAM;AAEnB,oBAAQ,MAAM;AAAA,cACZ,KAAK,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,WAAW,MAAM;AAAA,gBACnB,CAAC;AACD;AAAA,cACF;AAAA,cAEA,KAAK,cAAc;AACjB,+BAAe,sBAAsB,MAAM,aAAa;AACxD,sBAAM,SAAS,MAAM,SAAS,KAAK;AAEnC,wBAAQ;AAAA,kBACN,cAAc,OAAO;AAAA,kBACrB,kBAAkB,OAAO;AAAA,gBAC3B;AAAA,cACF;AAAA,cAEA,SAAS;AACP;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS;AAAA,QACP,WAAW;AAAA,UACT;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,MACA,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AACF;AAIA,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,MAAM,cAAE,OAAO;AAAA,EACf,eAAe,cAAE,OAAO;AAAA,EACxB,MAAM,cAAE,OAAO;AAAA,IACb,QAAQ,cAAE,OAAO;AAAA,MACf,cAAc,cAAE,OAAO;AAAA,MACvB,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAID,IAAM,wBAAwB,cAAE,mBAAmB,cAAc;AAAA,EAC/D,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,cAAc;AAAA,EACtC,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,2BAA2B;AAAA,EACnD,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,gBAAgB;AAAA,EACxC,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,iBAAiB;AAAA,IACvC,MAAM,cAAE,OAAO;AAAA,EACjB,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,qBAAqB;AAAA,EAC7C,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,uBAAuB;AAAA,EAC/C,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,YAAY,cAAE,QAAQ,YAAY;AAAA,IAClC,eAAe,cAAE,OAAO;AAAA,IACxB,UAAU,cAAE,OAAO;AAAA,MACjB,MAAM,cAAE,OAAO;AAAA,QACb,QAAQ,cAAE,OAAO;AAAA,UACf,cAAc,cAAE,OAAO;AAAA,UACvB,eAAe,cAAE,OAAO;AAAA,QAC1B,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;;;AD/OM,SAAS,aACd,UAAkC,CAAC,GACnB;AAvDlB;AAwDE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAA+B,CAAC,MAChC;AAvEJ,QAAAC;AAwEI,eAAI,wBAAwB,SAAS,UAAU;AAAA,MAC7C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AAEzB,SAAO;AACT;AAKO,IAAM,SAAS,aAAa;","names":["import_provider_u
tils","import_provider","import_provider_utils","import_zod","_a"]}
package/dist/index.mjs
ADDED
@@ -0,0 +1,376 @@
// src/cohere-provider.ts
import {
  generateId,
  loadApiKey,
  withoutTrailingSlash
} from "@ai-sdk/provider-utils";

// src/cohere-chat-language-model.ts
import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
} from "@ai-sdk/provider";
import {
  createJsonResponseHandler,
  createJsonStreamResponseHandler,
  postJsonToApi
} from "@ai-sdk/provider-utils";
import { z as z2 } from "zod";

// src/cohere-error.ts
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
import { z } from "zod";
var cohereErrorDataSchema = z.object({
  message: z.string()
});
var cohereFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: cohereErrorDataSchema,
  errorToMessage: (data) => data.message
});

// src/convert-to-cohere-chat-prompt.ts
import {
  UnsupportedFunctionalityError
} from "@ai-sdk/provider";
function convertToCohereChatPrompt(prompt) {
  const messages = [];
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        messages.push({ role: "SYSTEM", message: content });
        break;
      }
      case "user": {
        messages.push({
          role: "USER",
          message: content.map((part) => {
            switch (part.type) {
              case "text": {
                return part.text;
              }
              case "image": {
                throw new UnsupportedFunctionalityError({
                  functionality: "image-part"
                });
              }
            }
          }).join("")
        });
        break;
      }
      case "assistant": {
        let text = "";
        const toolCalls = [];
        for (const part of content) {
          switch (part.type) {
            case "text": {
              text += part.text;
              break;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError({
                functionality: "tool-call"
              });
            }
            default: {
              const _exhaustiveCheck = part;
              throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
            }
          }
        }
        messages.push({
          role: "CHATBOT",
          message: text,
          tool_calls: toolCalls.length > 0 ? toolCalls : void 0
        });
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError({
          functionality: "tool role"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return messages;
}

// src/map-cohere-finish-reason.ts
function mapCohereFinishReason(finishReason) {
  switch (finishReason) {
    case "COMPLETE":
    case "STOP_SEQUENCE":
      return "stop";
    case "MAX_TOKENS":
      return "length";
    case "ERROR":
    case "ERROR_LIMIT":
      return "error";
    case "ERROR_TOXIC":
      return "content-filter";
    case "USER_CANCEL":
      return "other";
    default:
      return "unknown";
  }
}

// src/cohere-chat-language-model.ts
var CohereChatLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    const type = mode.type;
    const chatPrompt = convertToCohereChatPrompt(prompt);
    const [lastMessage, ...history] = chatPrompt;
    const args = {
      // model id:
      model: this.modelId,
      // model specific settings:
      // none
      // standardized settings:
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      max_tokens: maxTokens,
      temperature,
      p: topP,
      seed,
      // messages:
      chat_history: history,
      message: lastMessage.role === "USER" ? lastMessage.message : void 0
    };
    switch (type) {
      case "regular": {
        return args;
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError2({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError2({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError2({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  async doGenerate(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: `${this.config.baseURL}/chat`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: cohereFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        cohereChatResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { chat_history, message, ...rawSettings } = args;
    return {
      text: response.text,
      finishReason: mapCohereFinishReason(response.finish_reason),
      usage: {
        promptTokens: response.meta.tokens.input_tokens,
        completionTokens: response.meta.tokens.output_tokens
      },
      rawCall: {
        rawPrompt: {
          chat_history,
          message
        },
        rawSettings
      },
      rawResponse: { headers: responseHeaders },
      warnings: void 0
    };
  }
  async doStream(options) {
    const args = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: `${this.config.baseURL}/chat`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: cohereFailedResponseHandler,
      successfulResponseHandler: createJsonStreamResponseHandler(
        cohereChatChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { chat_history, message, ...rawSettings } = args;
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            const type = value.event_type;
            switch (type) {
              case "text-generation": {
                controller.enqueue({
                  type: "text-delta",
                  textDelta: value.text
                });
                return;
              }
              case "stream-end": {
                finishReason = mapCohereFinishReason(value.finish_reason);
                const tokens = value.response.meta.tokens;
                usage = {
                  promptTokens: tokens.input_tokens,
                  completionTokens: tokens.output_tokens
                };
              }
              default: {
                return;
              }
            }
          },
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              usage
            });
          }
        })
      ),
      rawCall: {
        rawPrompt: {
          chat_history,
          message
        },
        rawSettings
      },
      rawResponse: { headers: responseHeaders },
      warnings: []
    };
  }
};
var cohereChatResponseSchema = z2.object({
  text: z2.string(),
  finish_reason: z2.string(),
  meta: z2.object({
    tokens: z2.object({
      input_tokens: z2.number(),
      output_tokens: z2.number()
    })
  })
});
var cohereChatChunkSchema = z2.discriminatedUnion("event_type", [
  z2.object({
    event_type: z2.literal("stream-start")
  }),
  z2.object({
    event_type: z2.literal("search-queries-generation")
  }),
  z2.object({
    event_type: z2.literal("search-results")
  }),
  z2.object({
    event_type: z2.literal("text-generation"),
    text: z2.string()
  }),
  z2.object({
    event_type: z2.literal("citation-generation")
  }),
  z2.object({
    event_type: z2.literal("tool-calls-generation")
  }),
  z2.object({
    event_type: z2.literal("stream-end"),
    finish_reason: z2.string(),
    response: z2.object({
      meta: z2.object({
        tokens: z2.object({
          input_tokens: z2.number(),
          output_tokens: z2.number()
        })
      })
    })
  })
]);

// src/cohere-provider.ts
function createCohere(options = {}) {
  var _a;
  const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.cohere.com/v1";
  const getHeaders = () => ({
    Authorization: `Bearer ${loadApiKey({
      apiKey: options.apiKey,
      environmentVariableName: "COHERE_API_KEY",
      description: "Cohere"
    })}`,
    ...options.headers
  });
  const createChatModel = (modelId, settings = {}) => {
    var _a2;
    return new CohereChatLanguageModel(modelId, settings, {
      provider: "mistral.chat",
      baseURL,
      headers: getHeaders,
      generateId: (_a2 = options.generateId) != null ? _a2 : generateId,
      fetch: options.fetch
    });
  };
  const provider = function(modelId, settings) {
    if (new.target) {
      throw new Error(
        "The Cohere model function cannot be called with the new keyword."
      );
    }
    return createChatModel(modelId, settings);
  };
  provider.languageModel = createChatModel;
  return provider;
}
var cohere = createCohere();
export {
  cohere,
  createCohere
};
//# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/cohere-provider.ts","../src/cohere-chat-language-model.ts","../src/cohere-error.ts","../src/convert-to-cohere-chat-prompt.ts","../src/map-cohere-finish-reason.ts"],"sourcesContent":["import {\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { CohereChatModelId, CohereChatSettings } from './cohere-chat-settings';\nimport { CohereChatLanguageModel } from './cohere-chat-language-model';\n\nexport interface CohereProvider {\n (\n modelId: CohereChatModelId,\n settings?: CohereChatSettings,\n ): CohereChatLanguageModel;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: CohereChatModelId,\n settings?: CohereChatSettings,\n ): CohereChatLanguageModel;\n}\n\nexport interface CohereProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.cohere.com/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: typeof fetch;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Cohere AI provider instance.\n */\nexport function createCohere(\n options: CohereProviderSettings = {},\n): CohereProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.cohere.com/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'COHERE_API_KEY',\n description: 'Cohere',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: CohereChatModelId,\n settings: CohereChatSettings = {},\n ) =>\n new CohereChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? 
generateId,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: CohereChatModelId,\n settings?: CohereChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Cohere model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n\n return provider as CohereProvider;\n}\n\n/**\nDefault Cohere provider instance.\n */\nexport const cohere = createCohere();\n","import {\n LanguageModelV1,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createJsonResponseHandler,\n createJsonStreamResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { CohereChatModelId, CohereChatSettings } from './cohere-chat-settings';\nimport { cohereFailedResponseHandler } from './cohere-error';\nimport { convertToCohereChatPrompt } from './convert-to-cohere-chat-prompt';\nimport { mapCohereFinishReason } from './map-cohere-finish-reason';\n\ntype CohereChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n fetch?: typeof fetch;\n};\n\nexport class CohereChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: CohereChatModelId;\n readonly settings: CohereChatSettings;\n\n private readonly config: CohereChatConfig;\n\n constructor(\n modelId: CohereChatModelId,\n settings: CohereChatSettings,\n config: CohereChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n // Cohere distinguishes between the current message and the chat history\n const chatPrompt = convertToCohereChatPrompt(prompt);\n const [lastMessage, ...history] = chatPrompt;\n\n const args = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n // none\n\n // standardized settings:\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n max_tokens: maxTokens,\n temperature,\n p: topP,\n seed,\n\n // messages:\n chat_history: history,\n message: lastMessage.role === 'USER' ? 
lastMessage.message : undefined,\n };\n\n switch (type) {\n case 'regular': {\n return args;\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: cohereFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n cohereChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { chat_history, message, ...rawSettings } = args;\n\n return {\n text: response.text,\n finishReason: mapCohereFinishReason(response.finish_reason),\n usage: {\n promptTokens: response.meta.tokens.input_tokens,\n completionTokens: response.meta.tokens.output_tokens,\n },\n rawCall: {\n rawPrompt: {\n chat_history,\n message,\n },\n rawSettings,\n },\n rawResponse: { headers: responseHeaders },\n warnings: undefined,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: cohereFailedResponseHandler,\n successfulResponseHandler: createJsonStreamResponseHandler(\n cohereChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { chat_history, message, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof cohereChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n const type = value.event_type;\n\n switch (type) {\n case 'text-generation': {\n controller.enqueue({\n type: 'text-delta',\n textDelta: value.text,\n });\n return;\n }\n\n case 'stream-end': {\n finishReason = mapCohereFinishReason(value.finish_reason);\n const tokens = value.response.meta.tokens;\n\n usage = {\n promptTokens: tokens.input_tokens,\n completionTokens: tokens.output_tokens,\n };\n }\n\n default: {\n return;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: {\n rawPrompt: {\n chat_history,\n message,\n },\n rawSettings,\n },\n rawResponse: { headers: responseHeaders },\n warnings: [],\n };\n }\n}\n\n// limited version of the schema, focussed on 
what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst cohereChatResponseSchema = z.object({\n text: z.string(),\n finish_reason: z.string(),\n meta: z.object({\n tokens: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst cohereChatChunkSchema = z.discriminatedUnion('event_type', [\n z.object({\n event_type: z.literal('stream-start'),\n }),\n z.object({\n event_type: z.literal('search-queries-generation'),\n }),\n z.object({\n event_type: z.literal('search-results'),\n }),\n z.object({\n event_type: z.literal('text-generation'),\n text: z.string(),\n }),\n z.object({\n event_type: z.literal('citation-generation'),\n }),\n z.object({\n event_type: z.literal('tool-calls-generation'),\n }),\n z.object({\n event_type: z.literal('stream-end'),\n finish_reason: z.string(),\n response: z.object({\n meta: z.object({\n tokens: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n }),\n }),\n }),\n]);\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst cohereErrorDataSchema = z.object({\n message: z.string(),\n});\n\nexport type CohereErrorData = z.infer<typeof cohereErrorDataSchema>;\n\nexport const cohereFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: cohereErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { CohereChatPrompt } from './cohere-chat-prompt';\n\nexport function convertToCohereChatPrompt(\n prompt: LanguageModelV1Prompt,\n): CohereChatPrompt {\n const messages: CohereChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'SYSTEM', message: content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'USER',\n message: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'image-part',\n });\n }\n }\n })\n .join(''),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n name: string;\n parameters: object;\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call',\n });\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'CHATBOT',\n message: text,\n tool_calls: toolCalls.length > 0 ? 
toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool role',\n });\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapCohereFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'COMPLETE':\n case 'STOP_SEQUENCE':\n return 'stop';\n\n case 'MAX_TOKENS':\n return 'length';\n\n case 'ERROR':\n case 'ERROR_LIMIT':\n return 'error';\n\n case 'ERROR_TOXIC':\n return 'content-filter';\n\n case 'USER_CANCEL':\n return 'other';\n\n default:\n return 'unknown';\n }\n}\n"],"mappings":";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACJP;AAAA,EAIE,iCAAAA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACZlB,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,wBAAwB,EAAE,OAAO;AAAA,EACrC,SAAS,EAAE,OAAO;AACpB,CAAC;AAIM,IAAM,8BAA8B,+BAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;ACZD;AAAA,EAEE;AAAA,OACK;AAGA,SAAS,0BACd,QACkB;AAClB,QAAM,WAA6B,CAAC;AAEpC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,CAAC;AAClD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAGD,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,8BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,IAAI,8BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACnFO,SAAS,sBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE,aAAO;AAAA,EACX;AACF;;;AHAO,IAAM,0BAAN,MAAyD;AAAA,EAS9D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAGlB,UAAM,aAAa,0BAA0B,MAAM;AACnD,UAAM,CAAC,aAAa,GAAG,OAAO,IAAI;AAElC,UAAM,OAAO;AAAA;AAAA,MAEX,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,MAMZ,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,YAAY;AAAA,MACZ;AAAA,MACA,GAAG;AAAA,MACH;AAAA;AAAA,MAGA,cAAc;AAAA,MACd,SAAS,YAAY,SAAS,SAAS,YAAY,UAAU;AAAA,IAC/D;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,eAAO;AAAA,MACT;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,I
ACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,cAAc,SAAS,GAAG,YAAY,IAAI;AAElD,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,cAAc,sBAAsB,SAAS,aAAa;AAAA,MAC1D,OAAO;AAAA,QACL,cAAc,SAAS,KAAK,OAAO;AAAA,QACnC,kBAAkB,SAAS,KAAK,OAAO;AAAA,MACzC;AAAA,MACA,SAAS;AAAA,QACP,WAAW;AAAA,UACT;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,MACA,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,cAAc,SAAS,GAAG,YAAY,IAAI;AAElD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AACpB,kBAAM,OAAO,MAAM;AAEnB,oBAAQ,MAAM;AAAA,cACZ,KAAK,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,WAAW,MAAM;AAAA,gBACnB,CAAC;AACD;AAAA,cACF;AAAA,cAEA,KAAK,cAAc;AACjB,+BAAe,sBAAsB,MAAM,aAAa;AACxD,sBAAM,SAAS,MAAM,SAAS,KAAK;AAEnC,wBAAQ;AAAA,kBACN,cAAc,OAAO;AAAA,kBACrB,kBAAkB,OAAO;AAAA,gBAC3B;AAAA,cACF;AAAA,cAEA,SAAS;AACP;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS;AAAA,QACP,WAAW;AAAA,UACT;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,MACA,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AACF;AAIA,IAAM,2BAA2BC,GAAE,OAAO;AAAA,EACxC,MAAMA,GAAE,OAAO;AAAA,EACf,eAAeA,GAAE,OAAO;AAAA,EACxB,MAAMA,GAAE,OAAO;AAAA,IACb,QAAQA,GAAE,OAAO;AAAA,MACf,cAAcA,GAAE,OAAO;AAAA,MACvB,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAID,IAAM,wBAAwBA,GAAE,mBAAmB,cAAc;AAAA,EAC/DA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,cAAc;AAAA,EACtC,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,2BAA2B;AAAA,EACnD,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,gBAAgB;AAAA,EACxC,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,iBAAiB;AAAA,IACvC,MAAMA,GAAE,OAAO;AAAA,EACjB,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,qBAAqB;AAAA,EAC7C,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,uBAAuB;AAAA,EAC/C,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,YAAYA,GAAE,QAAQ,YAAY;AAAA,IAClC,eAAeA,GAAE,OAAO;AAAA,IACxB,UAAUA,GAAE,OAAO;AAAA,MACjB,MAAMA,GAAE,OAAO;AAAA,QACb,QAAQA,GAAE,OAAO;AAAA,UACf,cAAcA,GAAE,OAAO;AAAA,UACvB,eAAeA,GAAE,OAAO;AAAA,QAC1B,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;;;AD/OM,SAAS,aACd,UAAkC,CAAC,GACnB;AAvDlB;AAwDE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAA+B,CAAC,MAChC;AAvEJ,QAAAC;AAwEI,eAAI,wBAAwB,SAAS,UAAU;AAAA,MAC7C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WA
AS,gBAAgB;AAEzB,SAAO;AACT;AAKO,IAAM,SAAS,aAAa;","names":["UnsupportedFunctionalityError","z","UnsupportedFunctionalityError","z","_a"]}
|
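The source map above embeds the provider implementation: `createCohere(options)` builds a callable provider that also exposes a `languageModel` method, reads the API key from the `COHERE_API_KEY` environment variable when none is supplied, and defaults the base URL to `https://api.cohere.com/v1`. A minimal configuration sketch based on those options (the proxy URL and extra header below are hypothetical, purely for illustration):

```ts
import { createCohere } from '@ai-sdk/cohere';

// Hypothetical proxy endpoint and header, shown only to illustrate the
// CohereProviderSettings options (baseURL, apiKey, headers, fetch, generateId).
const cohere = createCohere({
  baseURL: 'https://my-cohere-proxy.example.com/v1', // default: https://api.cohere.com/v1
  apiKey: process.env.COHERE_API_KEY,                // falls back to the COHERE_API_KEY env var
  headers: { 'X-Example-Header': 'docs' },
});

// The provider is callable and also exposes `languageModel`; both return a
// CohereChatLanguageModel for a Cohere chat model id such as 'command-r'.
const model = cohere.languageModel('command-r');
const sameModel = cohere('command-r');
```

Per the embedded source, calling the provider function with `new` throws, and the `object-json`, `object-tool`, and `object-grammar` generation modes raise `UnsupportedFunctionalityError`.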
package/package.json
ADDED
@@ -0,0 +1,62 @@
+{
+  "name": "@ai-sdk/cohere",
+  "version": "0.0.1",
+  "license": "Apache-2.0",
+  "sideEffects": false,
+  "main": "./dist/index.js",
+  "module": "./dist/index.mjs",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist/**/*"
+  ],
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.mjs",
+      "require": "./dist/index.js"
+    }
+  },
+  "dependencies": {
+    "@ai-sdk/provider": "0.0.10",
+    "@ai-sdk/provider-utils": "0.0.15"
+  },
+  "devDependencies": {
+    "@types/node": "^18",
+    "tsup": "^8",
+    "typescript": "5.1.3",
+    "zod": "3.23.8",
+    "@vercel/ai-tsconfig": "0.0.0"
+  },
+  "peerDependencies": {
+    "zod": "^3.0.0"
+  },
+  "engines": {
+    "node": ">=18"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "homepage": "https://sdk.vercel.ai/docs",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/vercel/ai.git"
+  },
+  "bugs": {
+    "url": "https://github.com/vercel/ai/issues"
+  },
+  "keywords": [
+    "ai"
+  ],
+  "scripts": {
+    "build": "tsup",
+    "clean": "rm -rf dist",
+    "dev": "tsup --watch",
+    "lint": "eslint \"./**/*.ts*\"",
+    "type-check": "tsc --noEmit",
+    "prettier-check": "prettier --check \"./**/*.ts*\"",
+    "test": "pnpm test:node && pnpm test:edge",
+    "test:edge": "vitest --config vitest.edge.config.js --run",
+    "test:node": "vitest --config vitest.node.config.js --run"
+  }
+}
|
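The `exports` map above routes both module systems to the build output: the `import` condition resolves `@ai-sdk/cohere` to `./dist/index.mjs`, the `require` condition to `./dist/index.js`, and `types` points tooling at `./dist/index.d.ts`. A small sketch of the ESM entry point, assuming Node >= 18 as required by the `engines` field:

```ts
// example.mts — resolved through the "import" condition to ./dist/index.mjs.
// A CommonJS file using require('@ai-sdk/cohere') would resolve to ./dist/index.js instead.
import { cohere, createCohere } from '@ai-sdk/cohere';

console.log(typeof cohere, typeof createCohere); // 'function' 'function'
```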