@ai-sdk/anthropic 0.0.1 → 0.0.2
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/index.d.mts +67 -0
- package/dist/index.d.ts +67 -0
- package/dist/index.js +476 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +456 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +1 -1
package/dist/index.d.mts
ADDED
@@ -0,0 +1,67 @@
import { LanguageModelV1 } from '@ai-sdk/provider';

type AnthropicMessagesModelId = 'claude-3-opus-20240229' | 'claude-3-sonnet-20240229' | 'claude-3-haiku-20240307' | (string & {});
interface AnthropicMessagesSettings {
    /**
    Only sample from the top K options for each subsequent token.

    Used to remove "long tail" low probability responses.
    Recommended for advanced use cases only. You usually only need to use temperature.
    */
    topK?: number;
}

type AnthropicMessagesConfig = {
    provider: string;
    baseURL: string;
    headers: () => Record<string, string | undefined>;
};
declare class AnthropicMessagesLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    readonly defaultObjectGenerationMode = "tool";
    readonly modelId: AnthropicMessagesModelId;
    readonly settings: AnthropicMessagesSettings;
    private readonly config;
    constructor(modelId: AnthropicMessagesModelId, settings: AnthropicMessagesSettings, config: AnthropicMessagesConfig);
    get provider(): string;
    private getArgs;
    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
}

/**
 * Anthropic provider.
 */
declare class Anthropic {
    /**
     * Base URL for the Anthropic API calls.
     */
    readonly baseURL: string;
    readonly apiKey?: string;
    /**
     * Creates a new Anthropic provider instance.
     */
    constructor(options?: {
        /**
         * Base URL for the Anthropic API calls.
         */
        baseURL?: string;
        /**
         * @deprecated Use `baseURL` instead.
         */
        baseUrl?: string;
        /**
         * API key for authenticating requests.
         */
        apiKey?: string;
        generateId?: () => string;
    });
    private get baseConfig();
    messages(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): AnthropicMessagesLanguageModel;
}
/**
 * Default Anthropic provider instance.
 */
declare const anthropic: Anthropic;

export { Anthropic, anthropic };
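As a usage illustration (not part of the released package): the declarations above are normally consumed through the AI SDK core package. The `ai` import and its `generateText` helper below are assumptions about the surrounding SDK; only `anthropic` and `messages()` come from this release.

import { generateText } from 'ai'; // assumed AI SDK core helper, not shipped in this diff
import { anthropic } from '@ai-sdk/anthropic';

// messages() returns an AnthropicMessagesLanguageModel, which implements LanguageModelV1.
const model = anthropic.messages('claude-3-haiku-20240307', { topK: 40 });

// generateText drives the model's doGenerate() under the hood.
const { text } = await generateText({
  model,
  prompt: 'Write a haiku about package diffs.',
});
console.log(text);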
package/dist/index.d.ts
ADDED
@@ -0,0 +1,67 @@
import { LanguageModelV1 } from '@ai-sdk/provider';

type AnthropicMessagesModelId = 'claude-3-opus-20240229' | 'claude-3-sonnet-20240229' | 'claude-3-haiku-20240307' | (string & {});
interface AnthropicMessagesSettings {
    /**
    Only sample from the top K options for each subsequent token.

    Used to remove "long tail" low probability responses.
    Recommended for advanced use cases only. You usually only need to use temperature.
    */
    topK?: number;
}

type AnthropicMessagesConfig = {
    provider: string;
    baseURL: string;
    headers: () => Record<string, string | undefined>;
};
declare class AnthropicMessagesLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    readonly defaultObjectGenerationMode = "tool";
    readonly modelId: AnthropicMessagesModelId;
    readonly settings: AnthropicMessagesSettings;
    private readonly config;
    constructor(modelId: AnthropicMessagesModelId, settings: AnthropicMessagesSettings, config: AnthropicMessagesConfig);
    get provider(): string;
    private getArgs;
    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
}

/**
 * Anthropic provider.
 */
declare class Anthropic {
    /**
     * Base URL for the Anthropic API calls.
     */
    readonly baseURL: string;
    readonly apiKey?: string;
    /**
     * Creates a new Anthropic provider instance.
     */
    constructor(options?: {
        /**
         * Base URL for the Anthropic API calls.
         */
        baseURL?: string;
        /**
         * @deprecated Use `baseURL` instead.
         */
        baseUrl?: string;
        /**
         * API key for authenticating requests.
         */
        apiKey?: string;
        generateId?: () => string;
    });
    private get baseConfig();
    messages(modelId: AnthropicMessagesModelId, settings?: AnthropicMessagesSettings): AnthropicMessagesLanguageModel;
}
/**
 * Default Anthropic provider instance.
 */
declare const anthropic: Anthropic;

export { Anthropic, anthropic };
package/dist/index.js
ADDED
@@ -0,0 +1,476 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var src_exports = {};
__export(src_exports, {
  Anthropic: () => Anthropic,
  anthropic: () => anthropic
});
module.exports = __toCommonJS(src_exports);

// src/anthropic-facade.ts
var import_provider_utils4 = require("@ai-sdk/provider-utils");

// src/anthropic-messages-language-model.ts
var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var import_zod2 = require("zod");

// src/anthropic-error.ts
var import_provider_utils = require("@ai-sdk/provider-utils");
var import_zod = require("zod");
var anthropicErrorDataSchema = import_zod.z.object({
  type: import_zod.z.literal("error"),
  error: import_zod.z.object({
    type: import_zod.z.string(),
    message: import_zod.z.string()
  })
});
var anthropicFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: anthropicErrorDataSchema,
  errorToMessage: (data) => data.error.message
});

// src/convert-to-anthropic-messages-prompt.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils2 = require("@ai-sdk/provider-utils");
function convertToAnthropicMessagesPrompt(prompt) {
  let system;
  const messages = [];
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        system = content;
        break;
      }
      case "user": {
        messages.push({
          role: "user",
          content: content.map((part) => {
            var _a;
            switch (part.type) {
              case "text": {
                return { type: "text", text: part.text };
              }
              case "image": {
                if (part.image instanceof URL) {
                  throw new import_provider.UnsupportedFunctionalityError({
                    functionality: "URL image parts"
                  });
                } else {
                  return {
                    type: "image",
                    source: {
                      type: "base64",
                      media_type: (_a = part.mimeType) != null ? _a : "image/jpeg",
                      data: (0, import_provider_utils2.convertUint8ArrayToBase64)(part.image)
                    }
                  };
                }
              }
            }
          })
        });
        break;
      }
      case "assistant": {
        messages.push({
          role: "assistant",
          content: content.map((part) => {
            switch (part.type) {
              case "text": {
                return { type: "text", text: part.text };
              }
              case "tool-call": {
                return {
                  type: "tool_use",
                  id: part.toolCallId,
                  name: part.toolName,
                  input: part.args
                };
              }
            }
          })
        });
        break;
      }
      case "tool": {
        messages.push({
          role: "user",
          content: content.map((part) => ({
            type: "tool_result",
            tool_use_id: part.toolCallId,
            content: JSON.stringify(part.result),
            is_error: part.isError
          }))
        });
        break;
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return {
    system,
    messages
  };
}

// src/map-anthropic-stop-reason.ts
function mapAnthropicStopReason(finishReason) {
  switch (finishReason) {
    case "end_turn":
    case "stop_sequence":
      return "stop";
    case "tool_use":
      return "tool-calls";
    case "max_tokens":
      return "length";
    default:
      return "other";
  }
}

// src/anthropic-messages-language-model.ts
var AnthropicMessagesLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = "tool";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const warnings = [];
    if (frequencyPenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "frequencyPenalty"
      });
    }
    if (presencePenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "presencePenalty"
      });
    }
    if (seed != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "seed"
      });
    }
    const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      top_k: this.settings.topK,
      // standardized settings:
      max_tokens: maxTokens != null ? maxTokens : 4096,
      // 4096: max model output tokens
      temperature,
      // uses 0..1 scale
      top_p: topP,
      // prompt:
      system: messagesPrompt.system,
      messages: messagesPrompt.messages
    };
    switch (type) {
      case "regular": {
        const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
        return {
          args: {
            ...baseArgs,
            tools: tools == null ? void 0 : tools.map((tool) => ({
              name: tool.name,
              description: tool.description,
              input_schema: tool.parameters
            }))
          },
          warnings
        };
      }
      case "object-json": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "json-mode object generation"
        });
      }
      case "object-tool": {
        const { name, description, parameters } = mode.tool;
        baseArgs.messages[baseArgs.messages.length - 1].content.push({
          type: "text",
          text: `

Use the '${name}' tool.`
        });
        return {
          args: {
            ...baseArgs,
            tools: [{ name, description, input_schema: parameters }]
          },
          warnings
        };
      }
      case "object-grammar": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "grammar-mode object generation"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  async doGenerate(options) {
    const { args, warnings } = this.getArgs(options);
    const response = await (0, import_provider_utils3.postJsonToApi)({
      url: `${this.config.baseURL}/messages`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: anthropicFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
        anthropicMessagesResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    let text = "";
    for (const content of response.content) {
      if (content.type === "text") {
        text += content.text;
      }
    }
    let toolCalls = void 0;
    if (response.content.some((content) => content.type === "tool_use")) {
      toolCalls = [];
      for (const content of response.content) {
        if (content.type === "tool_use") {
          toolCalls.push({
            toolCallType: "function",
            toolCallId: content.id,
            toolName: content.name,
            args: JSON.stringify(content.input)
          });
        }
      }
    }
    return {
      text,
      toolCalls,
      finishReason: mapAnthropicStopReason(response.stop_reason),
      usage: {
        promptTokens: response.usage.input_tokens,
        completionTokens: response.usage.output_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      warnings
    };
  }
  async doStream(options) {
    const { args, warnings } = this.getArgs(options);
    const response = await (0, import_provider_utils3.postJsonToApi)({
      url: `${this.config.baseURL}/messages`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: anthropicFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
        anthropicMessagesChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    const usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            switch (value.type) {
              case "ping":
              case "content_block_start":
              case "content_block_stop": {
                return;
              }
              case "content_block_delta": {
                controller.enqueue({
                  type: "text-delta",
                  textDelta: value.delta.text
                });
                return;
              }
              case "message_start": {
                usage.promptTokens = value.message.usage.input_tokens;
                usage.completionTokens = value.message.usage.output_tokens;
                return;
              }
              case "message_delta": {
                usage.completionTokens = value.usage.output_tokens;
                finishReason = mapAnthropicStopReason(value.delta.stop_reason);
                return;
              }
              case "message_stop": {
                controller.enqueue({ type: "finish", finishReason, usage });
                return;
              }
              default: {
                const _exhaustiveCheck = value;
                throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);
              }
            }
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings
    };
  }
};
var anthropicMessagesResponseSchema = import_zod2.z.object({
  type: import_zod2.z.literal("message"),
  content: import_zod2.z.array(
    import_zod2.z.discriminatedUnion("type", [
      import_zod2.z.object({
        type: import_zod2.z.literal("text"),
        text: import_zod2.z.string()
      }),
      import_zod2.z.object({
        type: import_zod2.z.literal("tool_use"),
        id: import_zod2.z.string(),
        name: import_zod2.z.string(),
        input: import_zod2.z.unknown()
      })
    ])
  ),
  stop_reason: import_zod2.z.string().optional().nullable(),
  usage: import_zod2.z.object({
    input_tokens: import_zod2.z.number(),
    output_tokens: import_zod2.z.number()
  })
});
var anthropicMessagesChunkSchema = import_zod2.z.discriminatedUnion("type", [
  import_zod2.z.object({
    type: import_zod2.z.literal("message_start"),
    message: import_zod2.z.object({
      usage: import_zod2.z.object({
        input_tokens: import_zod2.z.number(),
        output_tokens: import_zod2.z.number()
      })
    })
  }),
  import_zod2.z.object({
    type: import_zod2.z.literal("content_block_start"),
    index: import_zod2.z.number(),
    content_block: import_zod2.z.object({
      type: import_zod2.z.literal("text"),
      text: import_zod2.z.string()
    })
  }),
  import_zod2.z.object({
    type: import_zod2.z.literal("content_block_delta"),
    index: import_zod2.z.number(),
    delta: import_zod2.z.object({
      type: import_zod2.z.literal("text_delta"),
      text: import_zod2.z.string()
    })
  }),
  import_zod2.z.object({
    type: import_zod2.z.literal("content_block_stop"),
    index: import_zod2.z.number()
  }),
  import_zod2.z.object({
    type: import_zod2.z.literal("message_delta"),
    delta: import_zod2.z.object({ stop_reason: import_zod2.z.string().optional().nullable() }),
    usage: import_zod2.z.object({ output_tokens: import_zod2.z.number() })
  }),
  import_zod2.z.object({
    type: import_zod2.z.literal("message_stop")
  }),
  import_zod2.z.object({
    type: import_zod2.z.literal("ping")
  })
]);

// src/anthropic-facade.ts
var Anthropic = class {
  /**
   * Creates a new Anthropic provider instance.
   */
  constructor(options = {}) {
    var _a, _b;
    this.baseURL = (_b = (0, import_provider_utils4.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.anthropic.com/v1";
    this.apiKey = options.apiKey;
  }
  get baseConfig() {
    return {
      baseURL: this.baseURL,
      headers: () => ({
        "anthropic-version": "2023-06-01",
        "anthropic-beta": "tools-2024-04-04",
        "x-api-key": (0, import_provider_utils4.loadApiKey)({
          apiKey: this.apiKey,
          environmentVariableName: "ANTHROPIC_API_KEY",
          description: "Anthropic"
        })
      })
    };
  }
  messages(modelId, settings = {}) {
    return new AnthropicMessagesLanguageModel(modelId, settings, {
      provider: "anthropic.messages",
      ...this.baseConfig
    });
  }
};
var anthropic = new Anthropic();
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  Anthropic,
  anthropic
});
//# sourceMappingURL=index.js.map
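A usage note on the facade just shown (an illustration, not part of the package contents): the constructor reads the API key from the `apiKey` option or, via `loadApiKey`, from the `ANTHROPIC_API_KEY` environment variable, and strips any trailing slash from a custom `baseURL`. A minimal sketch of building a non-default provider instance follows; the key and proxy URL are made-up placeholders.

import { Anthropic } from '@ai-sdk/anthropic';

// Explicit key and base URL; omit apiKey to fall back to process.env.ANTHROPIC_API_KEY.
const provider = new Anthropic({
  apiKey: 'sk-ant-placeholder',                  // hypothetical key, not a real credential
  baseURL: 'https://example.com/anthropic/v1',   // hypothetical proxy endpoint
});

// Returns an AnthropicMessagesLanguageModel configured with provider "anthropic.messages".
const model = provider.messages('claude-3-opus-20240229');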
package/dist/index.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/anthropic-facade.ts","../src/anthropic-messages-language-model.ts","../src/anthropic-error.ts","../src/convert-to-anthropic-messages-prompt.ts","../src/map-anthropic-stop-reason.ts"],"sourcesContent":["export * from './anthropic-facade';\n","import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { AnthropicMessagesLanguageModel } from './anthropic-messages-language-model';\nimport {\n AnthropicMessagesModelId,\n AnthropicMessagesSettings,\n} from './anthropic-messages-settings';\n\n/**\n * Anthropic provider.\n */\nexport class Anthropic {\n /**\n * Base URL for the Anthropic API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n /**\n * Creates a new Anthropic provider instance.\n */\n constructor(\n options: {\n /**\n * Base URL for the Anthropic API calls.\n */\n baseURL?: string;\n\n /**\n * @deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\n * API key for authenticating requests.\n */\n apiKey?: string;\n\n generateId?: () => string;\n } = {},\n ) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL ?? options.baseUrl) ??\n 'https://api.anthropic.com/v1';\n this.apiKey = options.apiKey;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n 'anthropic-version': '2023-06-01',\n 'anthropic-beta': 'tools-2024-04-04',\n 'x-api-key': loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'ANTHROPIC_API_KEY',\n description: 'Anthropic',\n }),\n }),\n };\n }\n\n messages(\n modelId: AnthropicMessagesModelId,\n settings: AnthropicMessagesSettings = {},\n ) {\n return new AnthropicMessagesLanguageModel(modelId, settings, {\n provider: 'anthropic.messages',\n ...this.baseConfig,\n });\n }\n}\n\n/**\n * Default Anthropic provider instance.\n */\nexport const anthropic = new Anthropic();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1FunctionToolCall,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { anthropicFailedResponseHandler } from './anthropic-error';\nimport {\n AnthropicMessagesModelId,\n AnthropicMessagesSettings,\n} from './anthropic-messages-settings';\nimport { convertToAnthropicMessagesPrompt } from './convert-to-anthropic-messages-prompt';\nimport { mapAnthropicStopReason } from './map-anthropic-stop-reason';\n\ntype AnthropicMessagesConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n};\n\nexport class AnthropicMessagesLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'tool';\n\n readonly modelId: AnthropicMessagesModelId;\n readonly settings: AnthropicMessagesSettings;\n\n private readonly config: AnthropicMessagesConfig;\n\n constructor(\n modelId: AnthropicMessagesModelId,\n settings: AnthropicMessagesSettings,\n config: AnthropicMessagesConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: 
LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n top_k: this.settings.topK,\n\n // standardized settings:\n max_tokens: maxTokens ?? 4096, // 4096: max model output tokens\n temperature, // uses 0..1 scale\n top_p: topP,\n\n // prompt:\n system: messagesPrompt.system,\n messages: messagesPrompt.messages,\n };\n\n switch (type) {\n case 'regular': {\n // when the tools array is empty, change it to undefined to prevent OpenAI errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n\n return {\n args: {\n ...baseArgs,\n tools: tools?.map(tool => ({\n name: tool.name,\n description: tool.description,\n input_schema: tool.parameters,\n })),\n },\n warnings,\n };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'json-mode object generation',\n });\n }\n\n case 'object-tool': {\n const { name, description, parameters } = mode.tool;\n\n // add instruction to use tool:\n baseArgs.messages[baseArgs.messages.length - 1].content.push({\n type: 'text',\n text: `\\n\\nUse the '${name}' tool.`,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: [{ name, description, input_schema: parameters }],\n },\n warnings,\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'grammar-mode object generation',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/messages`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: anthropicFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n anthropicMessagesResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n // extract text\n let text = '';\n for (const content of response.content) {\n if (content.type === 'text') {\n text += content.text;\n }\n }\n\n // extract tool calls\n let toolCalls: LanguageModelV1FunctionToolCall[] | undefined = undefined;\n if (response.content.some(content => content.type === 'tool_use')) {\n toolCalls = [];\n for (const content of response.content) {\n if (content.type === 'tool_use') {\n toolCalls.push({\n toolCallType: 'function',\n toolCallId: content.id,\n toolName: content.name,\n args: JSON.stringify(content.input),\n });\n }\n }\n }\n\n return {\n text,\n toolCalls,\n finishReason: mapAnthropicStopReason(response.stop_reason),\n usage: {\n promptTokens: response.usage.input_tokens,\n completionTokens: response.usage.output_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = 
this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/messages`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: anthropicFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n anthropicMessagesChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n const usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof anthropicMessagesChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n switch (value.type) {\n case 'ping':\n case 'content_block_start':\n case 'content_block_stop': {\n return; // ignored\n }\n\n case 'content_block_delta': {\n controller.enqueue({\n type: 'text-delta',\n textDelta: value.delta.text,\n });\n return;\n }\n\n case 'message_start': {\n usage.promptTokens = value.message.usage.input_tokens;\n usage.completionTokens = value.message.usage.output_tokens;\n return;\n }\n\n case 'message_delta': {\n usage.completionTokens = value.usage.output_tokens;\n finishReason = mapAnthropicStopReason(value.delta.stop_reason);\n return;\n }\n\n case 'message_stop': {\n controller.enqueue({ type: 'finish', finishReason, usage });\n return;\n }\n\n default: {\n const _exhaustiveCheck: never = value;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst anthropicMessagesResponseSchema = z.object({\n type: z.literal('message'),\n content: z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('tool_use'),\n id: z.string(),\n name: z.string(),\n input: z.unknown(),\n }),\n ]),\n ),\n stop_reason: z.string().optional().nullable(),\n usage: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst anthropicMessagesChunkSchema = z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message_start'),\n message: z.object({\n usage: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n }),\n }),\n z.object({\n type: z.literal('content_block_start'),\n index: z.number(),\n content_block: z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n }),\n z.object({\n type: z.literal('content_block_delta'),\n index: z.number(),\n delta: z.object({\n type: z.literal('text_delta'),\n text: z.string(),\n }),\n }),\n z.object({\n type: z.literal('content_block_stop'),\n index: z.number(),\n }),\n z.object({\n type: z.literal('message_delta'),\n delta: z.object({ stop_reason: z.string().optional().nullable() }),\n usage: z.object({ output_tokens: z.number() }),\n }),\n z.object({\n type: z.literal('message_stop'),\n }),\n z.object({\n type: 
z.literal('ping'),\n }),\n]);\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst anthropicErrorDataSchema = z.object({\n type: z.literal('error'),\n error: z.object({\n type: z.string(),\n message: z.string(),\n }),\n});\n\nexport type AnthropicErrorData = z.infer<typeof anthropicErrorDataSchema>;\n\nexport const anthropicFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: anthropicErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n AnthropicMessage,\n AnthropicMessagesPrompt,\n} from './anthropic-messages-prompt';\n\nexport function convertToAnthropicMessagesPrompt(\n prompt: LanguageModelV1Prompt,\n): AnthropicMessagesPrompt {\n let system: string | undefined;\n const messages: AnthropicMessage[] = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n system = content;\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n if (part.image instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'URL image parts',\n });\n } else {\n return {\n type: 'image',\n source: {\n type: 'base64',\n media_type: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n };\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n messages.push({\n role: 'assistant',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'tool-call': {\n return {\n type: 'tool_use',\n id: part.toolCallId,\n name: part.toolName,\n input: part.args,\n };\n }\n }\n }),\n });\n\n break;\n }\n case 'tool': {\n messages.push({\n role: 'user',\n content: content.map(part => ({\n type: 'tool_result',\n tool_use_id: part.toolCallId,\n content: JSON.stringify(part.result),\n is_error: part.isError,\n })),\n });\n\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n system,\n messages,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapAnthropicStopReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'end_turn':\n case 'stop_sequence':\n return 'stop';\n case 'tool_use':\n return 'tool-calls';\n case 'max_tokens':\n return 'length';\n default:\n return 'other';\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAAiD;;;ACAjD,IAAAC,mBAOO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACdlB,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,2BAA2B,aAAE,OAAO;AAAA,EACxC,MAAM,aAAE,QAAQ,OAAO;AAAA,EACvB,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO;AAAA,IACf,SAAS,aAAE,OAAO;AAAA,EACpB,CAAC;AACH,CAAC;AAIM,IAAM,qCAAiC,sDAA+B;AAAA,EAC3E,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,sBAGO;AACP,IAAAC,yBAA0C;AAMnC,SAAS,iCACd,QACyB;AACzB,MAAI;AACJ,QAAM,WAA+B,CAAC;AAEtC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS;AACT;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AA1BvC;AA2BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,oBAAI,KAAK,iBAAiB,KAAK;AAC7B,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe;AAAA,kBACjB,CAAC;AAAA,gBACH,OAAO;AACL,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,QAAQ;AAAA,sBACN,MAAM;AAAA,sBACN,aAAY,UAAK,aAAL,YAAiB;AAAA,sBAC7B,UAAM,kDAA0B,KAAK,KAAK;AAAA,oBAC5C;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,IAAI,KAAK;AAAA,kBACT,MAAM,KAAK;AAAA,kBACX,OAAO,KAAK;AAAA,gBACd;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,WAAS;AAAA,YAC5B,MAAM;AAAA,YACN,aAAa,KAAK;AAAA,YAClB,SAAS,KAAK,UAAU,KAAK,MAAM;AAAA,YACnC,UAAU,KAAK;AAAA,UACjB,EAAE;AAAA,QACJ,CAAC;AAED;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;ACjGO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AHaO,IAAM,iCAAN,MAAgE;AAAA,EASrE,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,iBAAiB,iCAAiC,MAAM;AAE9D,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,OAAO,KAAK,SAAS;AAAA;AAAA,MAGrB,YAAY,gCAAa;AAAA;AAAA,MACzB;AAAA;AAAA,MACA,OAAO;AAAA;AAAA,MAGP,QAAQ,eAAe;AAAA,MACvB,UAAU,eAAe;AAAA,IAC3B;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AAEd,cAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,+BAAO,IAAI,WAAS;AAAA,cACzB,MAAM,KAAK;AAAA,cACX,aAAa,KAAK;AAAA,cAClB,cAAc,KAAK;AAAA,YACrB;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,EAAE,MAAM,aAAa,WAAW,IAAI,KAAK;AAG/C,iBAAS,SAAS,SAAS,SAAS,SAAS,CAAC,EAAE,QAAQ,KAAK;AAAA,UAC3D,MAAM;AAAA,UACN,MAAM;AAAA;AAAA,WAAgB,IAAI;AAAA,QAC5B,CAAC;AAED,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,CAAC,EAAE,MAAM,aAAa,cAAc,WAAW,CAAC;AAAA,UACzD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AA
AA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,UAAM,sCAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAGhD,QAAI,OAAO;AACX,eAAW,WAAW,SAAS,SAAS;AACtC,UAAI,QAAQ,SAAS,QAAQ;AAC3B,gBAAQ,QAAQ;AAAA,MAClB;AAAA,IACF;AAGA,QAAI,YAA2D;AAC/D,QAAI,SAAS,QAAQ,KAAK,aAAW,QAAQ,SAAS,UAAU,GAAG;AACjE,kBAAY,CAAC;AACb,iBAAW,WAAW,SAAS,SAAS;AACtC,YAAI,QAAQ,SAAS,YAAY;AAC/B,oBAAU,KAAK;AAAA,YACb,cAAc;AAAA,YACd,YAAY,QAAQ;AAAA,YACpB,UAAU,QAAQ;AAAA,YAClB,MAAM,KAAK,UAAU,QAAQ,KAAK;AAAA,UACpC,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,cAAc,uBAAuB,SAAS,WAAW;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,UAAM,sCAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,UAAM,QAA4D;AAAA,MAChE,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AAAA,cACL,KAAK;AAAA,cACL,KAAK,sBAAsB;AACzB;AAAA,cACF;AAAA,cAEA,KAAK,uBAAuB;AAC1B,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,WAAW,MAAM,MAAM;AAAA,gBACzB,CAAC;AACD;AAAA,cACF;AAAA,cAEA,KAAK,iBAAiB;AACpB,sBAAM,eAAe,MAAM,QAAQ,MAAM;AACzC,sBAAM,mBAAmB,MAAM,QAAQ,MAAM;AAC7C;AAAA,cACF;AAAA,cAEA,KAAK,iBAAiB;AACpB,sBAAM,mBAAmB,MAAM,MAAM;AACrC,+BAAe,uBAAuB,MAAM,MAAM,WAAW;AAC7D;AAAA,cACF;AAAA,cAEA,KAAK,gBAAgB;AACnB,2BAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAC1D;AAAA,cACF;AAAA,cAEA,SAAS;AACP,sBAAM,mBAA0B;AAChC,sBAAM,IAAI,MAAM,2BAA2B,gBAAgB,EAAE;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,kCAAkC,cAAE,OAAO;AAAA,EAC/C,MAAM,cAAE,QAAQ,SAAS;AAAA,EACzB,SAAS,cAAE;AAAA,IACT,cAAE,mBAAmB,QAAQ;AAAA,MAC3B,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,UAAU;AAAA,QAC1B,IAAI,cAAE,OAAO;AAAA,QACb,MAAM,cAAE,OAAO;AAAA,QACf,OAAO,cAAE,QAAQ;AAAA,MACnB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EACA,aAAa,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EAC5C,OAAO,cAAE,OAAO;AAAA,IACd,cAAc,cAAE,OAAO;AAAA,IACvB,eAAe,cAAE,OAAO;AAAA,EAC1B,CAAC;AACH,CAAC;AAID,IAAM,+BAA+B,cAAE,mBAAmB,QAAQ;AAAA,EAChE,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,eAAe;AAAA,IAC/B,SAAS,cAAE,OAAO;AAAA,MAChB,OAAO,cAAE,OAAO;AAAA,QACd,cAAc,cAAE,OAAO;AAAA,QACvB,eAAe,cAAE,OAAO;AAAA,MAC1B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,qBAAqB;AAAA,IACrC,OAAO,cAAE,OAAO;AAAA,IAChB,eAAe,cAAE,OAAO;AAAA,MACtB,MAAM,cAAE,QAAQ,MAAM;AAAA,MACtB,MAAM,cAAE,OAAO;AAAA,IACjB,CAAC;AAAA,EACH,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,qBAAqB;AAAA,IACrC,OAAO,cAAE,OAAO;AAAA,IAChB,OAAO,cAAE,OAAO;AAAA,MACd,MAAM,cAAE,QAAQ,YAAY;AAAA,MAC5B,MAAM,cAAE,OAAO;AAAA,IACjB,CAAC;AAAA,EACH,CA
AC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,oBAAoB;AAAA,IACpC,OAAO,cAAE,OAAO;AAAA,EAClB,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,eAAe;AAAA,IAC/B,OAAO,cAAE,OAAO,EAAE,aAAa,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC;AAAA,IACjE,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC;AAAA,EAC/C,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,cAAc;AAAA,EAChC,CAAC;AAAA,EACD,cAAE,OAAO;AAAA,IACP,MAAM,cAAE,QAAQ,MAAM;AAAA,EACxB,CAAC;AACH,CAAC;;;ADzWM,IAAM,YAAN,MAAgB;AAAA;AAAA;AAAA;AAAA,EAWrB,YACE,UAiBI,CAAC,GACL;AAxCJ;AAyCI,SAAK,WACH,uDAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AACF,SAAK,SAAS,QAAQ;AAAA,EACxB;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,qBAAqB;AAAA,QACrB,kBAAkB;AAAA,QAClB,iBAAa,mCAAW;AAAA,UACtB,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,SACE,SACA,WAAsC,CAAC,GACvC;AACA,WAAO,IAAI,+BAA+B,SAAS,UAAU;AAAA,MAC3D,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,IACV,CAAC;AAAA,EACH;AACF;AAKO,IAAM,YAAY,IAAI,UAAU;","names":["import_provider_utils","import_provider","import_provider_utils","import_zod","import_provider_utils"]}
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,456 @@
|
|
|
1
|
+
// src/anthropic-facade.ts
|
|
2
|
+
import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
|
|
3
|
+
|
|
4
|
+
// src/anthropic-messages-language-model.ts
|
|
5
|
+
import {
|
|
6
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError2
|
|
7
|
+
} from "@ai-sdk/provider";
|
|
8
|
+
import {
|
|
9
|
+
createEventSourceResponseHandler,
|
|
10
|
+
createJsonResponseHandler,
|
|
11
|
+
postJsonToApi
|
|
12
|
+
} from "@ai-sdk/provider-utils";
|
|
13
|
+
import { z as z2 } from "zod";
|
|
14
|
+
|
|
15
|
+
// src/anthropic-error.ts
|
|
16
|
+
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
|
|
17
|
+
import { z } from "zod";
|
|
18
|
+
var anthropicErrorDataSchema = z.object({
|
|
19
|
+
type: z.literal("error"),
|
|
20
|
+
error: z.object({
|
|
21
|
+
type: z.string(),
|
|
22
|
+
message: z.string()
|
|
23
|
+
})
|
|
24
|
+
});
|
|
25
|
+
var anthropicFailedResponseHandler = createJsonErrorResponseHandler({
|
|
26
|
+
errorSchema: anthropicErrorDataSchema,
|
|
27
|
+
errorToMessage: (data) => data.error.message
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
// src/convert-to-anthropic-messages-prompt.ts
|
|
31
|
+
import {
|
|
32
|
+
UnsupportedFunctionalityError
|
|
33
|
+
} from "@ai-sdk/provider";
|
|
34
|
+
import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
|
|
35
|
+
function convertToAnthropicMessagesPrompt(prompt) {
|
|
36
|
+
let system;
|
|
37
|
+
const messages = [];
|
|
38
|
+
for (const { role, content } of prompt) {
|
|
39
|
+
switch (role) {
|
|
40
|
+
case "system": {
|
|
41
|
+
system = content;
|
|
42
|
+
break;
|
|
43
|
+
}
|
|
44
|
+
case "user": {
|
|
45
|
+
messages.push({
|
|
46
|
+
role: "user",
|
|
47
|
+
content: content.map((part) => {
|
|
48
|
+
var _a;
|
|
49
|
+
switch (part.type) {
|
|
50
|
+
case "text": {
|
|
51
|
+
return { type: "text", text: part.text };
|
|
52
|
+
}
|
|
53
|
+
case "image": {
|
|
54
|
+
if (part.image instanceof URL) {
|
|
55
|
+
throw new UnsupportedFunctionalityError({
|
|
56
|
+
functionality: "URL image parts"
|
|
57
|
+
});
|
|
58
|
+
} else {
|
|
59
|
+
return {
|
|
60
|
+
type: "image",
|
|
61
|
+
source: {
|
|
62
|
+
type: "base64",
|
|
63
|
+
media_type: (_a = part.mimeType) != null ? _a : "image/jpeg",
|
|
64
|
+
data: convertUint8ArrayToBase64(part.image)
|
|
65
|
+
}
|
|
66
|
+
};
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
})
|
|
71
|
+
});
|
|
72
|
+
break;
|
|
73
|
+
}
|
|
74
|
+
case "assistant": {
|
|
75
|
+
messages.push({
|
|
76
|
+
role: "assistant",
|
|
77
|
+
content: content.map((part) => {
|
|
78
|
+
switch (part.type) {
|
|
79
|
+
case "text": {
|
|
80
|
+
return { type: "text", text: part.text };
|
|
81
|
+
}
|
|
82
|
+
case "tool-call": {
|
|
83
|
+
return {
|
|
84
|
+
type: "tool_use",
|
|
85
|
+
id: part.toolCallId,
|
|
86
|
+
name: part.toolName,
|
|
87
|
+
input: part.args
|
|
88
|
+
};
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
})
|
|
92
|
+
});
|
|
93
|
+
break;
|
|
94
|
+
}
|
|
95
|
+
case "tool": {
|
|
96
|
+
messages.push({
|
|
97
|
+
role: "user",
|
|
98
|
+
content: content.map((part) => ({
|
|
99
|
+
type: "tool_result",
|
|
100
|
+
tool_use_id: part.toolCallId,
|
|
101
|
+
content: JSON.stringify(part.result),
|
|
102
|
+
is_error: part.isError
|
|
103
|
+
}))
|
|
104
|
+
});
|
|
105
|
+
break;
|
|
106
|
+
}
|
|
107
|
+
default: {
|
|
108
|
+
const _exhaustiveCheck = role;
|
|
109
|
+
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
return {
|
|
114
|
+
system,
|
|
115
|
+
messages
|
|
116
|
+
};
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
// src/map-anthropic-stop-reason.ts
|
|
120
|
+
function mapAnthropicStopReason(finishReason) {
|
|
121
|
+
switch (finishReason) {
|
|
122
|
+
case "end_turn":
|
|
123
|
+
case "stop_sequence":
|
|
124
|
+
return "stop";
|
|
125
|
+
case "tool_use":
|
|
126
|
+
return "tool-calls";
|
|
127
|
+
case "max_tokens":
|
|
128
|
+
return "length";
|
|
129
|
+
default:
|
|
130
|
+
return "other";
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
// src/anthropic-messages-language-model.ts
|
|
135
|
+
var AnthropicMessagesLanguageModel = class {
|
|
136
|
+
constructor(modelId, settings, config) {
|
|
137
|
+
this.specificationVersion = "v1";
|
|
138
|
+
this.defaultObjectGenerationMode = "tool";
|
|
139
|
+
this.modelId = modelId;
|
|
140
|
+
this.settings = settings;
|
|
141
|
+
this.config = config;
|
|
142
|
+
}
|
|
143
|
+
get provider() {
|
|
144
|
+
return this.config.provider;
|
|
145
|
+
}
|
|
146
|
+
getArgs({
|
|
147
|
+
mode,
|
|
148
|
+
prompt,
|
|
149
|
+
maxTokens,
|
|
150
|
+
temperature,
|
|
151
|
+
topP,
|
|
152
|
+
frequencyPenalty,
|
|
153
|
+
presencePenalty,
|
|
154
|
+
seed
|
|
155
|
+
}) {
|
|
156
|
+
var _a;
|
|
157
|
+
const type = mode.type;
|
|
158
|
+
const warnings = [];
|
|
159
|
+
if (frequencyPenalty != null) {
|
|
160
|
+
warnings.push({
|
|
161
|
+
type: "unsupported-setting",
|
|
162
|
+
setting: "frequencyPenalty"
|
|
163
|
+
});
|
|
164
|
+
}
|
|
165
|
+
if (presencePenalty != null) {
|
|
166
|
+
warnings.push({
|
|
167
|
+
type: "unsupported-setting",
|
|
168
|
+
setting: "presencePenalty"
|
|
169
|
+
});
|
|
170
|
+
}
|
|
171
|
+
if (seed != null) {
|
|
172
|
+
warnings.push({
|
|
173
|
+
type: "unsupported-setting",
|
|
174
|
+
setting: "seed"
|
|
175
|
+
});
|
|
176
|
+
}
|
|
177
|
+
const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);
|
|
178
|
+
const baseArgs = {
|
|
179
|
+
// model id:
|
|
180
|
+
model: this.modelId,
|
|
181
|
+
// model specific settings:
|
|
182
|
+
top_k: this.settings.topK,
|
|
183
|
+
// standardized settings:
|
|
184
|
+
max_tokens: maxTokens != null ? maxTokens : 4096,
|
|
185
|
+
// 4096: max model output tokens
|
|
186
|
+
temperature,
|
|
187
|
+
// uses 0..1 scale
|
|
188
|
+
top_p: topP,
|
|
189
|
+
// prompt:
|
|
190
|
+
system: messagesPrompt.system,
|
|
191
|
+
messages: messagesPrompt.messages
|
|
192
|
+
};
|
|
193
|
+
switch (type) {
|
|
194
|
+
case "regular": {
|
|
195
|
+
const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
|
|
196
|
+
return {
|
|
197
|
+
args: {
|
|
198
|
+
...baseArgs,
|
|
199
|
+
tools: tools == null ? void 0 : tools.map((tool) => ({
|
|
200
|
+
name: tool.name,
|
|
201
|
+
description: tool.description,
|
|
202
|
+
input_schema: tool.parameters
|
|
203
|
+
}))
|
|
204
|
+
},
|
|
205
|
+
warnings
|
|
206
|
+
};
|
|
207
|
+
}
|
|
208
|
+
case "object-json": {
|
|
209
|
+
throw new UnsupportedFunctionalityError2({
|
|
210
|
+
functionality: "json-mode object generation"
|
|
211
|
+
});
|
|
212
|
+
}
|
|
213
|
+
case "object-tool": {
|
|
214
|
+
const { name, description, parameters } = mode.tool;
|
|
215
|
+
baseArgs.messages[baseArgs.messages.length - 1].content.push({
|
|
216
|
+
type: "text",
|
|
217
|
+
text: `
|
|
218
|
+
|
|
219
|
+
Use the '${name}' tool.`
|
|
220
|
+
});
|
|
221
|
+
return {
|
|
222
|
+
args: {
|
|
223
|
+
...baseArgs,
|
|
224
|
+
tools: [{ name, description, input_schema: parameters }]
|
|
225
|
+
},
|
|
226
|
+
warnings
|
|
227
|
+
};
|
|
228
|
+
}
|
|
229
|
+
case "object-grammar": {
|
|
230
|
+
throw new UnsupportedFunctionalityError2({
|
|
231
|
+
functionality: "grammar-mode object generation"
|
|
232
|
+
});
|
|
233
|
+
}
|
|
234
|
+
default: {
|
|
235
|
+
const _exhaustiveCheck = type;
|
|
236
|
+
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
async doGenerate(options) {
|
|
241
|
+
const { args, warnings } = this.getArgs(options);
|
|
242
|
+
const response = await postJsonToApi({
|
|
243
|
+
url: `${this.config.baseURL}/messages`,
|
|
244
|
+
headers: this.config.headers(),
|
|
245
|
+
body: args,
|
|
246
|
+
failedResponseHandler: anthropicFailedResponseHandler,
|
|
247
|
+
successfulResponseHandler: createJsonResponseHandler(
|
|
248
|
+
anthropicMessagesResponseSchema
|
|
249
|
+
),
|
|
250
|
+
abortSignal: options.abortSignal
|
|
251
|
+
});
|
|
252
|
+
const { messages: rawPrompt, ...rawSettings } = args;
|
|
253
|
+
let text = "";
|
|
254
|
+
for (const content of response.content) {
|
|
255
|
+
if (content.type === "text") {
|
|
256
|
+
text += content.text;
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
let toolCalls = void 0;
|
|
260
|
+
if (response.content.some((content) => content.type === "tool_use")) {
|
|
261
|
+
toolCalls = [];
|
|
262
|
+
for (const content of response.content) {
|
|
263
|
+
if (content.type === "tool_use") {
|
|
264
|
+
toolCalls.push({
|
|
265
|
+
toolCallType: "function",
|
|
266
|
+
toolCallId: content.id,
|
|
267
|
+
toolName: content.name,
|
|
268
|
+
args: JSON.stringify(content.input)
|
|
269
|
+
});
|
|
270
|
+
}
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
return {
|
|
274
|
+
text,
|
|
275
|
+
toolCalls,
|
|
276
|
+
finishReason: mapAnthropicStopReason(response.stop_reason),
|
|
277
|
+
usage: {
|
|
278
|
+
promptTokens: response.usage.input_tokens,
|
|
279
|
+
completionTokens: response.usage.output_tokens
|
|
280
|
+
},
|
|
281
|
+
rawCall: { rawPrompt, rawSettings },
|
|
282
|
+
warnings
|
|
283
|
+
};
|
|
284
|
+
}
|
|
285
|
+
async doStream(options) {
|
|
286
|
+
const { args, warnings } = this.getArgs(options);
|
|
287
|
+
const response = await postJsonToApi({
|
|
288
|
+
url: `${this.config.baseURL}/messages`,
|
|
289
|
+
headers: this.config.headers(),
|
|
290
|
+
body: {
|
|
291
|
+
...args,
|
|
292
|
+
stream: true
|
|
293
|
+
},
|
|
294
|
+
failedResponseHandler: anthropicFailedResponseHandler,
|
|
295
|
+
successfulResponseHandler: createEventSourceResponseHandler(
|
|
296
|
+
anthropicMessagesChunkSchema
|
|
297
|
+
),
|
|
298
|
+
abortSignal: options.abortSignal
|
|
299
|
+
});
|
|
300
|
+
const { messages: rawPrompt, ...rawSettings } = args;
|
|
301
|
+
let finishReason = "other";
|
|
302
|
+
const usage = {
|
|
303
|
+
promptTokens: Number.NaN,
|
|
304
|
+
completionTokens: Number.NaN
|
|
305
|
+
};
|
|
306
|
+
return {
|
|
307
|
+
stream: response.pipeThrough(
|
|
308
|
+
new TransformStream({
|
|
309
|
+
transform(chunk, controller) {
|
|
310
|
+
if (!chunk.success) {
|
|
311
|
+
controller.enqueue({ type: "error", error: chunk.error });
|
|
312
|
+
return;
|
|
313
|
+
}
|
|
314
|
+
const value = chunk.value;
|
|
315
|
+
switch (value.type) {
|
|
316
|
+
case "ping":
|
|
317
|
+
case "content_block_start":
|
|
318
|
+
case "content_block_stop": {
|
|
319
|
+
return;
|
|
320
|
+
}
|
|
321
|
+
case "content_block_delta": {
|
|
322
|
+
controller.enqueue({
|
|
323
|
+
type: "text-delta",
|
|
324
|
+
textDelta: value.delta.text
|
|
325
|
+
});
|
|
326
|
+
return;
|
|
327
|
+
}
|
|
328
|
+
case "message_start": {
|
|
329
|
+
usage.promptTokens = value.message.usage.input_tokens;
|
|
330
|
+
usage.completionTokens = value.message.usage.output_tokens;
|
|
331
|
+
return;
|
|
332
|
+
}
|
|
333
|
+
case "message_delta": {
|
|
334
|
+
usage.completionTokens = value.usage.output_tokens;
|
|
335
|
+
finishReason = mapAnthropicStopReason(value.delta.stop_reason);
|
|
336
|
+
return;
|
|
337
|
+
}
|
|
338
|
+
case "message_stop": {
|
|
339
|
+
controller.enqueue({ type: "finish", finishReason, usage });
|
|
340
|
+
return;
|
|
341
|
+
}
|
|
342
|
+
default: {
|
|
343
|
+
const _exhaustiveCheck = value;
|
|
344
|
+
throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);
|
|
345
|
+
}
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
})
|
|
349
|
+
),
|
|
350
|
+
rawCall: { rawPrompt, rawSettings },
|
|
351
|
+
warnings
|
|
352
|
+
};
|
|
353
|
+
}
|
|
354
|
+
};
|
|
355
|
+
var anthropicMessagesResponseSchema = z2.object({
|
|
356
|
+
type: z2.literal("message"),
|
|
357
|
+
content: z2.array(
|
|
358
|
+
z2.discriminatedUnion("type", [
|
|
359
|
+
z2.object({
|
|
360
|
+
type: z2.literal("text"),
|
|
361
|
+
text: z2.string()
|
|
362
|
+
}),
|
|
363
|
+
z2.object({
|
|
364
|
+
type: z2.literal("tool_use"),
|
|
365
|
+
id: z2.string(),
|
|
366
|
+
name: z2.string(),
|
|
367
|
+
input: z2.unknown()
|
|
368
|
+
})
|
|
369
|
+
])
|
|
370
|
+
),
|
|
371
|
+
stop_reason: z2.string().optional().nullable(),
|
|
372
|
+
usage: z2.object({
|
|
373
|
+
input_tokens: z2.number(),
|
|
374
|
+
output_tokens: z2.number()
|
|
375
|
+
})
|
|
376
|
+
});
|
|
377
|
+
var anthropicMessagesChunkSchema = z2.discriminatedUnion("type", [
|
|
378
|
+
z2.object({
|
|
379
|
+
type: z2.literal("message_start"),
|
|
380
|
+
message: z2.object({
|
|
381
|
+
usage: z2.object({
|
|
382
|
+
input_tokens: z2.number(),
|
|
383
|
+
output_tokens: z2.number()
|
|
384
|
+
})
|
|
385
|
+
})
|
|
386
|
+
}),
|
|
387
|
+
z2.object({
|
|
388
|
+
type: z2.literal("content_block_start"),
|
|
389
|
+
index: z2.number(),
|
|
390
|
+
content_block: z2.object({
|
|
391
|
+
type: z2.literal("text"),
|
|
392
|
+
text: z2.string()
|
|
393
|
+
})
|
|
394
|
+
}),
|
|
395
|
+
z2.object({
|
|
396
|
+
type: z2.literal("content_block_delta"),
|
|
397
|
+
index: z2.number(),
|
|
398
|
+
delta: z2.object({
|
|
399
|
+
type: z2.literal("text_delta"),
|
|
400
|
+
text: z2.string()
|
|
401
|
+
})
|
|
402
|
+
}),
|
|
403
|
+
z2.object({
|
|
404
|
+
type: z2.literal("content_block_stop"),
|
|
405
|
+
index: z2.number()
|
|
406
|
+
}),
|
|
407
|
+
z2.object({
|
|
408
|
+
type: z2.literal("message_delta"),
|
|
409
|
+
delta: z2.object({ stop_reason: z2.string().optional().nullable() }),
|
|
410
|
+
usage: z2.object({ output_tokens: z2.number() })
|
|
411
|
+
}),
|
|
412
|
+
z2.object({
|
|
413
|
+
type: z2.literal("message_stop")
|
|
414
|
+
}),
|
|
415
|
+
z2.object({
|
|
416
|
+
type: z2.literal("ping")
|
|
417
|
+
})
|
|
418
|
+
]);
|
|
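The chunk schema above mirrors the server-sent events of a streamed Messages call. A sketch of a typical event order with illustrative values, annotated with how the TransformStream in doStream reacts to each:

// Typical event order for one streamed message (ping, content_block_start and
// content_block_stop are ignored by the transform):
//   message_start       -> record usage.input_tokens / usage.output_tokens
//   content_block_delta -> enqueue { type: 'text-delta', textDelta: delta.text }
//   message_delta       -> update usage.output_tokens, map delta.stop_reason
//   message_stop        -> enqueue { type: 'finish', finishReason, usage }
const exampleChunks = [
  { type: 'message_start', message: { usage: { input_tokens: 12, output_tokens: 1 } } },
  { type: 'content_block_start', index: 0, content_block: { type: 'text', text: '' } },
  { type: 'content_block_delta', index: 0, delta: { type: 'text_delta', text: 'Hello' } },
  { type: 'content_block_stop', index: 0 },
  { type: 'message_delta', delta: { stop_reason: 'end_turn' }, usage: { output_tokens: 5 } },
  { type: 'message_stop' },
];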
419
|
+
|
|
420
|
+
// src/anthropic-facade.ts
|
|
421
|
+
var Anthropic = class {
|
|
422
|
+
/**
|
|
423
|
+
* Creates a new Anthropic provider instance.
|
|
424
|
+
*/
|
|
425
|
+
constructor(options = {}) {
|
|
426
|
+
var _a, _b;
|
|
427
|
+
this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.anthropic.com/v1";
|
|
428
|
+
this.apiKey = options.apiKey;
|
|
429
|
+
}
|
|
430
|
+
get baseConfig() {
|
|
431
|
+
return {
|
|
432
|
+
baseURL: this.baseURL,
|
|
433
|
+
headers: () => ({
|
|
434
|
+
"anthropic-version": "2023-06-01",
|
|
435
|
+
"anthropic-beta": "tools-2024-04-04",
|
|
436
|
+
"x-api-key": loadApiKey({
|
|
437
|
+
apiKey: this.apiKey,
|
|
438
|
+
environmentVariableName: "ANTHROPIC_API_KEY",
|
|
439
|
+
description: "Anthropic"
|
|
440
|
+
})
|
|
441
|
+
})
|
|
442
|
+
};
|
|
443
|
+
}
|
|
444
|
+
messages(modelId, settings = {}) {
|
|
445
|
+
return new AnthropicMessagesLanguageModel(modelId, settings, {
|
|
446
|
+
provider: "anthropic.messages",
|
|
447
|
+
...this.baseConfig
|
|
448
|
+
});
|
|
449
|
+
}
|
|
450
|
+
};
|
|
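A usage sketch for the facade above: construct a provider and ask it for a Messages model. When apiKey is omitted, loadApiKey falls back to the ANTHROPIC_API_KEY environment variable, and every request carries the anthropic-version and anthropic-beta headers from baseConfig. The model id and topK value below are illustrative:

// Sketch: creating a provider instance and a Messages model.
import { Anthropic } from '@ai-sdk/anthropic';

const provider = new Anthropic({
  // baseURL defaults to https://api.anthropic.com/v1; apiKey falls back to ANTHROPIC_API_KEY
  apiKey: process.env.ANTHROPIC_API_KEY,
});

// Returns an AnthropicMessagesLanguageModel (a LanguageModelV1 implementation).
const model = provider.messages('claude-3-haiku-20240307', { topK: 40 });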
451
|
+
var anthropic = new Anthropic();
|
|
452
|
+
export {
|
|
453
|
+
Anthropic,
|
|
454
|
+
anthropic
|
|
455
|
+
};
|
|
456
|
+
//# sourceMappingURL=index.mjs.map
|
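The default anthropic instance exported above is meant to be handed to an AI SDK entry point that accepts a LanguageModelV1. A sketch assuming the companion ai package of this era; in some releases the helper is exported as experimental_generateText rather than generateText:

// Sketch: passing the default provider instance to the AI SDK core.
// generateText is assumed from the companion `ai` package (possibly
// exported as `experimental_generateText` in older releases).
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const { text, usage, finishReason } = await generateText({
  model: anthropic.messages('claude-3-haiku-20240307'),
  prompt: 'Write a haiku about source maps.',
});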
package/dist/index.mjs.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/anthropic-facade.ts","../src/anthropic-messages-language-model.ts","../src/anthropic-error.ts","../src/convert-to-anthropic-messages-prompt.ts","../src/map-anthropic-stop-reason.ts"],"sourcesContent":["import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { AnthropicMessagesLanguageModel } from './anthropic-messages-language-model';\nimport {\n AnthropicMessagesModelId,\n AnthropicMessagesSettings,\n} from './anthropic-messages-settings';\n\n/**\n * Anthropic provider.\n */\nexport class Anthropic {\n /**\n * Base URL for the Anthropic API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n /**\n * Creates a new Anthropic provider instance.\n */\n constructor(\n options: {\n /**\n * Base URL for the Anthropic API calls.\n */\n baseURL?: string;\n\n /**\n * @deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\n * API key for authenticating requests.\n */\n apiKey?: string;\n\n generateId?: () => string;\n } = {},\n ) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL ?? options.baseUrl) ??\n 'https://api.anthropic.com/v1';\n this.apiKey = options.apiKey;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n 'anthropic-version': '2023-06-01',\n 'anthropic-beta': 'tools-2024-04-04',\n 'x-api-key': loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'ANTHROPIC_API_KEY',\n description: 'Anthropic',\n }),\n }),\n };\n }\n\n messages(\n modelId: AnthropicMessagesModelId,\n settings: AnthropicMessagesSettings = {},\n ) {\n return new AnthropicMessagesLanguageModel(modelId, settings, {\n provider: 'anthropic.messages',\n ...this.baseConfig,\n });\n }\n}\n\n/**\n * Default Anthropic provider instance.\n */\nexport const anthropic = new Anthropic();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1FunctionToolCall,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { anthropicFailedResponseHandler } from './anthropic-error';\nimport {\n AnthropicMessagesModelId,\n AnthropicMessagesSettings,\n} from './anthropic-messages-settings';\nimport { convertToAnthropicMessagesPrompt } from './convert-to-anthropic-messages-prompt';\nimport { mapAnthropicStopReason } from './map-anthropic-stop-reason';\n\ntype AnthropicMessagesConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n};\n\nexport class AnthropicMessagesLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'tool';\n\n readonly modelId: AnthropicMessagesModelId;\n readonly settings: AnthropicMessagesSettings;\n\n private readonly config: AnthropicMessagesConfig;\n\n constructor(\n modelId: AnthropicMessagesModelId,\n settings: AnthropicMessagesSettings,\n config: AnthropicMessagesConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if 
(frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n top_k: this.settings.topK,\n\n // standardized settings:\n max_tokens: maxTokens ?? 4096, // 4096: max model output tokens\n temperature, // uses 0..1 scale\n top_p: topP,\n\n // prompt:\n system: messagesPrompt.system,\n messages: messagesPrompt.messages,\n };\n\n switch (type) {\n case 'regular': {\n // when the tools array is empty, change it to undefined to prevent OpenAI errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n\n return {\n args: {\n ...baseArgs,\n tools: tools?.map(tool => ({\n name: tool.name,\n description: tool.description,\n input_schema: tool.parameters,\n })),\n },\n warnings,\n };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'json-mode object generation',\n });\n }\n\n case 'object-tool': {\n const { name, description, parameters } = mode.tool;\n\n // add instruction to use tool:\n baseArgs.messages[baseArgs.messages.length - 1].content.push({\n type: 'text',\n text: `\\n\\nUse the '${name}' tool.`,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: [{ name, description, input_schema: parameters }],\n },\n warnings,\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'grammar-mode object generation',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/messages`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: anthropicFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n anthropicMessagesResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n // extract text\n let text = '';\n for (const content of response.content) {\n if (content.type === 'text') {\n text += content.text;\n }\n }\n\n // extract tool calls\n let toolCalls: LanguageModelV1FunctionToolCall[] | undefined = undefined;\n if (response.content.some(content => content.type === 'tool_use')) {\n toolCalls = [];\n for (const content of response.content) {\n if (content.type === 'tool_use') {\n toolCalls.push({\n toolCallType: 'function',\n toolCallId: content.id,\n toolName: content.name,\n args: JSON.stringify(content.input),\n });\n }\n }\n }\n\n return {\n text,\n toolCalls,\n finishReason: mapAnthropicStopReason(response.stop_reason),\n usage: {\n promptTokens: response.usage.input_tokens,\n completionTokens: response.usage.output_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await 
postJsonToApi({\n url: `${this.config.baseURL}/messages`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: anthropicFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n anthropicMessagesChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n const usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof anthropicMessagesChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n switch (value.type) {\n case 'ping':\n case 'content_block_start':\n case 'content_block_stop': {\n return; // ignored\n }\n\n case 'content_block_delta': {\n controller.enqueue({\n type: 'text-delta',\n textDelta: value.delta.text,\n });\n return;\n }\n\n case 'message_start': {\n usage.promptTokens = value.message.usage.input_tokens;\n usage.completionTokens = value.message.usage.output_tokens;\n return;\n }\n\n case 'message_delta': {\n usage.completionTokens = value.usage.output_tokens;\n finishReason = mapAnthropicStopReason(value.delta.stop_reason);\n return;\n }\n\n case 'message_stop': {\n controller.enqueue({ type: 'finish', finishReason, usage });\n return;\n }\n\n default: {\n const _exhaustiveCheck: never = value;\n throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);\n }\n }\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst anthropicMessagesResponseSchema = z.object({\n type: z.literal('message'),\n content: z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('tool_use'),\n id: z.string(),\n name: z.string(),\n input: z.unknown(),\n }),\n ]),\n ),\n stop_reason: z.string().optional().nullable(),\n usage: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst anthropicMessagesChunkSchema = z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message_start'),\n message: z.object({\n usage: z.object({\n input_tokens: z.number(),\n output_tokens: z.number(),\n }),\n }),\n }),\n z.object({\n type: z.literal('content_block_start'),\n index: z.number(),\n content_block: z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n }),\n z.object({\n type: z.literal('content_block_delta'),\n index: z.number(),\n delta: z.object({\n type: z.literal('text_delta'),\n text: z.string(),\n }),\n }),\n z.object({\n type: z.literal('content_block_stop'),\n index: z.number(),\n }),\n z.object({\n type: z.literal('message_delta'),\n delta: z.object({ stop_reason: z.string().optional().nullable() }),\n usage: z.object({ output_tokens: z.number() }),\n }),\n z.object({\n type: z.literal('message_stop'),\n }),\n z.object({\n type: z.literal('ping'),\n }),\n]);\n","import { 
createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst anthropicErrorDataSchema = z.object({\n type: z.literal('error'),\n error: z.object({\n type: z.string(),\n message: z.string(),\n }),\n});\n\nexport type AnthropicErrorData = z.infer<typeof anthropicErrorDataSchema>;\n\nexport const anthropicFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: anthropicErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n AnthropicMessage,\n AnthropicMessagesPrompt,\n} from './anthropic-messages-prompt';\n\nexport function convertToAnthropicMessagesPrompt(\n prompt: LanguageModelV1Prompt,\n): AnthropicMessagesPrompt {\n let system: string | undefined;\n const messages: AnthropicMessage[] = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n system = content;\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n if (part.image instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'URL image parts',\n });\n } else {\n return {\n type: 'image',\n source: {\n type: 'base64',\n media_type: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n };\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n messages.push({\n role: 'assistant',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'tool-call': {\n return {\n type: 'tool_use',\n id: part.toolCallId,\n name: part.toolName,\n input: part.args,\n };\n }\n }\n }),\n });\n\n break;\n }\n case 'tool': {\n messages.push({\n role: 'user',\n content: content.map(part => ({\n type: 'tool_result',\n tool_use_id: part.toolCallId,\n content: JSON.stringify(part.result),\n is_error: part.isError,\n })),\n });\n\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n system,\n messages,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapAnthropicStopReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'end_turn':\n case 'stop_sequence':\n return 'stop';\n case 'tool_use':\n return 'tool-calls';\n case 'max_tokens':\n return 'length';\n default:\n return 'other';\n 
}\n}\n"],"mappings":";AAAA,SAAS,YAAY,4BAA4B;;;ACAjD;AAAA,EAME,iCAAAA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACdlB,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,2BAA2B,EAAE,OAAO;AAAA,EACxC,MAAM,EAAE,QAAQ,OAAO;AAAA,EACvB,OAAO,EAAE,OAAO;AAAA,IACd,MAAM,EAAE,OAAO;AAAA,IACf,SAAS,EAAE,OAAO;AAAA,EACpB,CAAC;AACH,CAAC;AAIM,IAAM,iCAAiC,+BAA+B;AAAA,EAC3E,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD;AAAA,EAEE;AAAA,OACK;AACP,SAAS,iCAAiC;AAMnC,SAAS,iCACd,QACyB;AACzB,MAAI;AACJ,QAAM,WAA+B,CAAC;AAEtC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS;AACT;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AA1BvC;AA2BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,oBAAI,KAAK,iBAAiB,KAAK;AAC7B,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe;AAAA,kBACjB,CAAC;AAAA,gBACH,OAAO;AACL,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,QAAQ;AAAA,sBACN,MAAM;AAAA,sBACN,aAAY,UAAK,aAAL,YAAiB;AAAA,sBAC7B,MAAM,0BAA0B,KAAK,KAAK;AAAA,oBAC5C;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,IAAI,KAAK;AAAA,kBACT,MAAM,KAAK;AAAA,kBACX,OAAO,KAAK;AAAA,gBACd;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,WAAS;AAAA,YAC5B,MAAM;AAAA,YACN,aAAa,KAAK;AAAA,YAClB,SAAS,KAAK,UAAU,KAAK,MAAM;AAAA,YACnC,UAAU,KAAK;AAAA,UACjB,EAAE;AAAA,QACJ,CAAC;AAED;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;ACjGO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AHaO,IAAM,iCAAN,MAAgE;AAAA,EASrE,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,iBAAiB,iCAAiC,MAAM;AAE9D,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,OAAO,KAAK,SAAS;AAAA;AAAA,MAGrB,YAAY,gCAAa;AAAA;AAAA,MACzB;AAAA;AAAA,MACA,OAAO;AAAA;AAAA,MAGP,QAAQ,eAAe;AAAA,MACvB,UAAU,eAAe;AAAA,IAC3B;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AAEd,cAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,+BAAO,IAAI,WAAS;AAAA,cACzB,MAAM,KAAK;AAAA,cACX,aAAa,KAAK;AAAA,cAClB,cAAc,KAAK;AAAA,YACrB;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,EAAE,MAAM,aAAa,WAAW,IAAI,KAAK;AAG/C,iBAAS,SAAS,SAAS,SAAS,SAAS,CAAC,EAAE,QAAQ,KAAK;AAAA,UAC3D,MAAM;AAAA,UACN,MAAM;AAAA;AAAA,WAAgB,IAAI;AAAA,QAC5B,CAAC;AAED,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,CAAC,EAAE,MAAM,aAAa,cAAc,WAAW,CAAC;AAAA,UACzD;AAAA,UACA;
AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAGhD,QAAI,OAAO;AACX,eAAW,WAAW,SAAS,SAAS;AACtC,UAAI,QAAQ,SAAS,QAAQ;AAC3B,gBAAQ,QAAQ;AAAA,MAClB;AAAA,IACF;AAGA,QAAI,YAA2D;AAC/D,QAAI,SAAS,QAAQ,KAAK,aAAW,QAAQ,SAAS,UAAU,GAAG;AACjE,kBAAY,CAAC;AACb,iBAAW,WAAW,SAAS,SAAS;AACtC,YAAI,QAAQ,SAAS,YAAY;AAC/B,oBAAU,KAAK;AAAA,YACb,cAAc;AAAA,YACd,YAAY,QAAQ;AAAA,YACpB,UAAU,QAAQ;AAAA,YAClB,MAAM,KAAK,UAAU,QAAQ,KAAK;AAAA,UACpC,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,cAAc,uBAAuB,SAAS,WAAW;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,UAAM,QAA4D;AAAA,MAChE,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AAAA,cACL,KAAK;AAAA,cACL,KAAK,sBAAsB;AACzB;AAAA,cACF;AAAA,cAEA,KAAK,uBAAuB;AAC1B,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,WAAW,MAAM,MAAM;AAAA,gBACzB,CAAC;AACD;AAAA,cACF;AAAA,cAEA,KAAK,iBAAiB;AACpB,sBAAM,eAAe,MAAM,QAAQ,MAAM;AACzC,sBAAM,mBAAmB,MAAM,QAAQ,MAAM;AAC7C;AAAA,cACF;AAAA,cAEA,KAAK,iBAAiB;AACpB,sBAAM,mBAAmB,MAAM,MAAM;AACrC,+BAAe,uBAAuB,MAAM,MAAM,WAAW;AAC7D;AAAA,cACF;AAAA,cAEA,KAAK,gBAAgB;AACnB,2BAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAC1D;AAAA,cACF;AAAA,cAEA,SAAS;AACP,sBAAM,mBAA0B;AAChC,sBAAM,IAAI,MAAM,2BAA2B,gBAAgB,EAAE;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,kCAAkCC,GAAE,OAAO;AAAA,EAC/C,MAAMA,GAAE,QAAQ,SAAS;AAAA,EACzB,SAASA,GAAE;AAAA,IACTA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,UAAU;AAAA,QAC1B,IAAIA,GAAE,OAAO;AAAA,QACb,MAAMA,GAAE,OAAO;AAAA,QACf,OAAOA,GAAE,QAAQ;AAAA,MACnB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EACA,aAAaA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EAC5C,OAAOA,GAAE,OAAO;AAAA,IACd,cAAcA,GAAE,OAAO;AAAA,IACvB,eAAeA,GAAE,OAAO;AAAA,EAC1B,CAAC;AACH,CAAC;AAID,IAAM,+BAA+BA,GAAE,mBAAmB,QAAQ;AAAA,EAChEA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,eAAe;AAAA,IAC/B,SAASA,GAAE,OAAO;AAAA,MAChB,OAAOA,GAAE,OAAO;AAAA,QACd,cAAcA,GAAE,OAAO;AAAA,QACvB,eAAeA,GAAE,OAAO;AAAA,MAC1B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,qBAAqB;AAAA,IACrC,OAAOA,GAAE,OAAO;AAAA,IAChB,eAAeA,GAAE,OAAO;AAAA,MACtB,MAAMA,GAAE,QAAQ,MAAM;AAAA,MACtB,MAAMA,GAAE,OAAO;AAAA,IACjB,CAAC;AAAA,EACH,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,qBAAqB;AAAA,IACrC,OAAOA,GAAE,OAAO;AAAA,IAChB,OAAOA,GAAE,OAAO;AAAA,MACd,MAAMA,GAAE,QAAQ,YAAY
;AAAA,MAC5B,MAAMA,GAAE,OAAO;AAAA,IACjB,CAAC;AAAA,EACH,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,oBAAoB;AAAA,IACpC,OAAOA,GAAE,OAAO;AAAA,EAClB,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,eAAe;AAAA,IAC/B,OAAOA,GAAE,OAAO,EAAE,aAAaA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC;AAAA,IACjE,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC;AAAA,EAC/C,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,cAAc;AAAA,EAChC,CAAC;AAAA,EACDA,GAAE,OAAO;AAAA,IACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,EACxB,CAAC;AACH,CAAC;;;ADzWM,IAAM,YAAN,MAAgB;AAAA;AAAA;AAAA;AAAA,EAWrB,YACE,UAiBI,CAAC,GACL;AAxCJ;AAyCI,SAAK,WACH,2BAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AACF,SAAK,SAAS,QAAQ;AAAA,EACxB;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,qBAAqB;AAAA,QACrB,kBAAkB;AAAA,QAClB,aAAa,WAAW;AAAA,UACtB,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,SACE,SACA,WAAsC,CAAC,GACvC;AACA,WAAO,IAAI,+BAA+B,SAAS,UAAU;AAAA,MAC3D,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,IACV,CAAC;AAAA,EACH;AACF;AAKO,IAAM,YAAY,IAAI,UAAU;","names":["UnsupportedFunctionalityError","z","UnsupportedFunctionalityError","z"]}
|