@ai-sdk/anthropic 2.0.0-canary.1 → 2.0.0-canary.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +109 -0
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +190 -170
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +192 -172
- package/dist/index.mjs.map +1 -1
- package/{internal/dist → dist/internal}/index.d.mts +4 -4
- package/{internal/dist → dist/internal}/index.d.ts +4 -4
- package/{internal/dist → dist/internal}/index.js +187 -169
- package/dist/internal/index.js.map +1 -0
- package/{internal/dist → dist/internal}/index.mjs +189 -171
- package/dist/internal/index.mjs.map +1 -0
- package/internal.d.ts +1 -0
- package/package.json +16 -14
- package/internal/dist/index.js.map +0 -1
- package/internal/dist/index.mjs.map +0 -1
@@ -6,7 +6,7 @@ import {
   combineHeaders,
   createEventSourceResponseHandler,
   createJsonResponseHandler,
-  parseProviderOptions,
+  parseProviderOptions as parseProviderOptions2,
   postJsonToApi,
   resolve
 } from "@ai-sdk/provider-utils";
@@ -31,13 +31,15 @@ var anthropicFailedResponseHandler = createJsonErrorResponseHandler({
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
-function prepareTools(
-
-
+function prepareTools({
+  tools,
+  toolChoice
+}) {
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   const betas = /* @__PURE__ */ new Set();
   if (tools == null) {
-    return { tools: void 0,
+    return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
   }
   const anthropicTools2 = [];
   for (const tool of tools) {
@@ -109,11 +111,10 @@ function prepareTools(mode) {
       break;
     }
   }
-  const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
     return {
       tools: anthropicTools2,
-
+      toolChoice: void 0,
       toolWarnings,
       betas
     };
@@ -123,30 +124,30 @@ function prepareTools(mode) {
     case "auto":
       return {
         tools: anthropicTools2,
-
+        toolChoice: { type: "auto" },
         toolWarnings,
         betas
       };
     case "required":
       return {
         tools: anthropicTools2,
-
+        toolChoice: { type: "any" },
         toolWarnings,
         betas
       };
     case "none":
-      return { tools: void 0,
+      return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
     case "tool":
       return {
         tools: anthropicTools2,
-
+        toolChoice: { type: "tool", name: toolChoice.toolName },
         toolWarnings,
         betas
       };
     default: {
       const _exhaustiveCheck = type;
       throw new UnsupportedFunctionalityError({
-        functionality: `
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
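
The hunks above replace the old `prepareTools(mode)` signature with a `{ tools, toolChoice }` options object and make the Anthropic `tool_choice` mapping explicit. A minimal TypeScript sketch of that mapping, using only the branches visible in the diff; the type names here are illustrative, not the package's exports:

```ts
// Illustrative sketch of the toolChoice mapping in prepareTools above.
// SdkToolChoice is an assumed name; the real types live in @ai-sdk/provider.
type SdkToolChoice =
  | { type: "auto" }
  | { type: "required" }
  | { type: "none" }
  | { type: "tool"; toolName: string };

type AnthropicToolChoice =
  | { type: "auto" }
  | { type: "any" }
  | { type: "tool"; name: string }
  | undefined;

function mapToolChoice(choice: SdkToolChoice): AnthropicToolChoice {
  switch (choice.type) {
    case "auto":
      return { type: "auto" };
    case "required":
      return { type: "any" }; // SDK "required" becomes Anthropic "any"
    case "none":
      return undefined; // tools are dropped entirely in this branch
    case "tool":
      return { type: "tool", name: choice.toolName };
  }
}
```
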
@@ -156,13 +157,13 @@ function prepareTools(mode) {
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
-import {
-function convertToAnthropicMessagesPrompt({
+import { convertToBase64, parseProviderOptions } from "@ai-sdk/provider-utils";
+async function convertToAnthropicMessagesPrompt({
   prompt,
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c
+  var _a, _b, _c;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
@@ -184,10 +185,10 @@ function convertToAnthropicMessagesPrompt({
           functionality: "Multiple system messages that are separated by user/assistant messages"
         });
       }
-      system = block.messages.map(({ content,
+      system = block.messages.map(({ content, providerOptions }) => ({
         type: "text",
         text: content,
-        cache_control: getCacheControl(
+        cache_control: getCacheControl(providerOptions)
       }));
       break;
     }
@@ -200,7 +201,7 @@ function convertToAnthropicMessagesPrompt({
       for (let j = 0; j < content.length; j++) {
         const part = content[j];
         const isLastPart = j === content.length - 1;
-        const cacheControl = (_a = getCacheControl(part.
+        const cacheControl = (_a = getCacheControl(part.providerOptions)) != null ? _a : isLastPart ? getCacheControl(message.providerOptions) : void 0;
         switch (part.type) {
           case "text": {
             anthropicContent.push({
@@ -210,42 +211,39 @@ function convertToAnthropicMessagesPrompt({
             });
             break;
           }
-          case "image": {
-            anthropicContent.push({
-              type: "image",
-              source: part.image instanceof URL ? {
-                type: "url",
-                url: part.image.toString()
-              } : {
-                type: "base64",
-                media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
-                data: convertUint8ArrayToBase64(part.image)
-              },
-              cache_control: cacheControl
-            });
-            break;
-          }
           case "file": {
-            if (part.
-
-
+            if (part.mediaType.startsWith("image/")) {
+              anthropicContent.push({
+                type: "image",
+                source: part.data instanceof URL ? {
+                  type: "url",
+                  url: part.data.toString()
+                } : {
+                  type: "base64",
+                  media_type: part.mediaType === "image/*" ? "image/jpeg" : part.mediaType,
+                  data: convertToBase64(part.data)
+                },
+                cache_control: cacheControl
               });
-            }
-
+            } else if (part.mediaType === "application/pdf") {
+              betas.add("pdfs-2024-09-25");
+              anthropicContent.push({
+                type: "document",
+                source: part.data instanceof URL ? {
+                  type: "url",
+                  url: part.data.toString()
+                } : {
+                  type: "base64",
+                  media_type: "application/pdf",
+                  data: convertToBase64(part.data)
+                },
+                cache_control: cacheControl
+              });
+            } else {
               throw new UnsupportedFunctionalityError2({
-                functionality:
+                functionality: `media type: ${part.mediaType}`
               });
             }
-            betas.add("pdfs-2024-09-25");
-            anthropicContent.push({
-              type: "document",
-              source: {
-                type: "base64",
-                media_type: "application/pdf",
-                data: part.data
-              },
-              cache_control: cacheControl
-            });
             break;
           }
         }
@@ -256,7 +254,7 @@ function convertToAnthropicMessagesPrompt({
       for (let i2 = 0; i2 < content.length; i2++) {
         const part = content[i2];
         const isLastPart = i2 === content.length - 1;
-        const cacheControl = (
+        const cacheControl = (_b = getCacheControl(part.providerOptions)) != null ? _b : isLastPart ? getCacheControl(message.providerOptions) : void 0;
         const toolResultContent = part.content != null ? part.content.map((part2) => {
           var _a2;
           switch (part2.type) {
@@ -271,7 +269,7 @@ function convertToAnthropicMessagesPrompt({
               type: "image",
               source: {
                 type: "base64",
-                media_type: (_a2 = part2.
+                media_type: (_a2 = part2.mediaType) != null ? _a2 : "image/jpeg",
                 data: part2.data
               },
               cache_control: void 0
@@ -306,7 +304,7 @@ function convertToAnthropicMessagesPrompt({
       for (let k = 0; k < content.length; k++) {
         const part = content[k];
         const isLastContentPart = k === content.length - 1;
-        const cacheControl = (
+        const cacheControl = (_c = getCacheControl(part.providerOptions)) != null ? _c : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
         switch (part.type) {
           case "text": {
             anthropicContent.push({
@@ -323,12 +321,37 @@ function convertToAnthropicMessagesPrompt({
           }
           case "reasoning": {
             if (sendReasoning) {
-
-
-
-
-              cache_control: cacheControl
+              const reasoningMetadata = await parseProviderOptions({
+                provider: "anthropic",
+                providerOptions: part.providerOptions,
+                schema: anthropicReasoningMetadataSchema
               });
+              if (reasoningMetadata != null) {
+                if (reasoningMetadata.signature != null) {
+                  anthropicContent.push({
+                    type: "thinking",
+                    thinking: part.text,
+                    signature: reasoningMetadata.signature,
+                    cache_control: cacheControl
+                  });
+                } else if (reasoningMetadata.redactedData != null) {
+                  anthropicContent.push({
+                    type: "redacted_thinking",
+                    data: reasoningMetadata.redactedData,
+                    cache_control: cacheControl
+                  });
+                } else {
+                  warnings.push({
+                    type: "other",
+                    message: "unsupported reasoning metadata"
+                  });
+                }
+              } else {
+                warnings.push({
+                  type: "other",
+                  message: "unsupported reasoning metadata"
+                });
+              }
             } else {
               warnings.push({
                 type: "other",
@@ -337,14 +360,6 @@ function convertToAnthropicMessagesPrompt({
             }
             break;
           }
-          case "redacted-reasoning": {
-            anthropicContent.push({
-              type: "redacted_thinking",
-              data: part.data,
-              cache_control: cacheControl
-            });
-            break;
-          }
           case "tool-call": {
             anthropicContent.push({
               type: "tool_use",
@@ -363,7 +378,7 @@ function convertToAnthropicMessagesPrompt({
     }
     default: {
       const _exhaustiveCheck = type;
-      throw new Error(`
+      throw new Error(`content type: ${_exhaustiveCheck}`);
     }
   }
 }
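
The new reasoning branch above round-trips thinking blocks through provider options instead of a dedicated `redacted-reasoning` part type: a signature turns the part back into a `thinking` block, redacted data into a `redacted_thinking` block, and anything else produces a warning. A small sketch of that decision, assuming the metadata shape declared by `anthropicReasoningMetadataSchema` later in this diff:

```ts
// Sketch only; mirrors the if/else chain in the reasoning case above.
type ReasoningMetadata = { signature?: string; redactedData?: string };

function toThinkingBlock(text: string, meta: ReasoningMetadata | undefined) {
  if (meta?.signature != null) {
    return { type: "thinking" as const, thinking: text, signature: meta.signature };
  }
  if (meta?.redactedData != null) {
    return { type: "redacted_thinking" as const, data: meta.redactedData };
  }
  return null; // the real converter pushes an "unsupported reasoning metadata" warning
}
```
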
@@ -438,21 +453,23 @@ function mapAnthropicStopReason(finishReason) {
 var AnthropicMessagesLanguageModel = class {
   constructor(modelId, settings, config) {
     this.specificationVersion = "v2";
-    this.defaultObjectGenerationMode = "tool";
     this.modelId = modelId;
     this.settings = settings;
     this.config = config;
   }
+  supportsUrl(url) {
+    return url.protocol === "https:";
+  }
   get provider() {
     return this.config.provider;
   }
-
-
+  async getSupportedUrls() {
+    var _a, _b, _c;
+    return (_c = (_b = (_a = this.config).getSupportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
   }
   async getArgs({
-    mode,
     prompt,
-
+    maxOutputTokens = 4096,
     // 4096: max model output tokens TODO update default in v5
     temperature,
     topP,
@@ -462,10 +479,11 @@ var AnthropicMessagesLanguageModel = class {
     stopSequences,
     responseFormat,
     seed,
-
+    tools,
+    toolChoice,
+    providerOptions
   }) {
     var _a, _b, _c;
-    const type = mode.type;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -492,12 +510,12 @@ var AnthropicMessagesLanguageModel = class {
         details: "JSON response format is not supported."
       });
     }
-    const { prompt: messagesPrompt, betas: messagesBetas } = convertToAnthropicMessagesPrompt({
+    const { prompt: messagesPrompt, betas: messagesBetas } = await convertToAnthropicMessagesPrompt({
       prompt,
       sendReasoning: (_a = this.settings.sendReasoning) != null ? _a : true,
       warnings
     });
-    const anthropicOptions =
+    const anthropicOptions = await parseProviderOptions2({
       provider: "anthropic",
       providerOptions,
       schema: anthropicProviderOptionsSchema
@@ -508,7 +526,7 @@ var AnthropicMessagesLanguageModel = class {
       // model id:
       model: this.modelId,
       // standardized settings:
-      max_tokens:
+      max_tokens: maxOutputTokens,
       temperature,
       top_k: topK,
       top_p: topP,
@@ -551,44 +569,23 @@ var AnthropicMessagesLanguageModel = class {
         details: "topP is not supported when thinking is enabled"
       });
     }
-      baseArgs.max_tokens =
-    }
-    switch (type) {
-      case "regular": {
-        const {
-          tools,
-          tool_choice,
-          toolWarnings,
-          betas: toolsBetas
-        } = prepareTools(mode);
-        return {
-          args: { ...baseArgs, tools, tool_choice },
-          warnings: [...warnings, ...toolWarnings],
-          betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
-        };
-      }
-      case "object-json": {
-        throw new UnsupportedFunctionalityError3({
-          functionality: "json-mode object generation"
-        });
-      }
-      case "object-tool": {
-        const { name, description, parameters } = mode.tool;
-        return {
-          args: {
-            ...baseArgs,
-            tools: [{ name, description, input_schema: parameters }],
-            tool_choice: { type: "tool", name }
-          },
-          warnings,
-          betas: messagesBetas
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
+      baseArgs.max_tokens = maxOutputTokens + thinkingBudget;
     }
+    const {
+      tools: anthropicTools2,
+      toolChoice: anthropicToolChoice,
+      toolWarnings,
+      betas: toolsBetas
+    } = prepareTools({ tools, toolChoice });
+    return {
+      args: {
+        ...baseArgs,
+        tools: anthropicTools2,
+        tool_choice: anthropicToolChoice
+      },
+      warnings: [...warnings, ...toolWarnings],
+      betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
+    };
   }
   async getHeaders({
     betas,
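
With the mode switch removed, `getArgs` always goes through `prepareTools({ tools, toolChoice })`, and when extended thinking is enabled the thinking budget is added on top of the requested output tokens. A small sketch of that token arithmetic as it appears above (option names taken from `anthropicProviderOptionsSchema` later in the diff; this is not the package's actual API):

```ts
// Sketch of the max_tokens computation visible in the hunk above.
function resolveMaxTokens(maxOutputTokens = 4096, thinkingBudgetTokens?: number) {
  return thinkingBudgetTokens != null
    ? maxOutputTokens + thinkingBudgetTokens
    : maxOutputTokens;
}

// e.g. resolveMaxTokens(4096, 2048) === 6144
```
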
@@ -626,56 +623,62 @@ var AnthropicMessagesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const
-
-
-
-
-
-
-
-
-
-
-
-
+    const content = [];
+    for (const part of response.content) {
+      switch (part.type) {
+        case "text": {
+          content.push({ type: "text", text: part.text });
+          break;
+        }
+        case "thinking": {
+          content.push({
+            type: "reasoning",
+            text: part.thinking,
+            providerMetadata: {
+              anthropic: {
+                signature: part.signature
+              }
+            }
+          });
+          break;
+        }
+        case "redacted_thinking": {
+          content.push({
+            type: "reasoning",
+            text: "",
+            providerMetadata: {
+              anthropic: {
+                redactedData: part.data
+              }
+            }
+          });
+          break;
+        }
+        case "tool_use": {
+          content.push({
+            type: "tool-call",
             toolCallType: "function",
-            toolCallId:
-            toolName:
-            args: JSON.stringify(
+            toolCallId: part.id,
+            toolName: part.name,
+            args: JSON.stringify(part.input)
           });
+          break;
         }
       }
     }
-    const reasoning = response.content.filter(
-      (content) => content.type === "redacted_thinking" || content.type === "thinking"
-    ).map(
-      (content) => content.type === "thinking" ? {
-        type: "text",
-        text: content.thinking,
-        signature: content.signature
-      } : {
-        type: "redacted",
-        data: content.data
-      }
-    );
     return {
-
-      reasoning: reasoning.length > 0 ? reasoning : void 0,
-      toolCalls,
+      content,
       finishReason: mapAnthropicStopReason(response.stop_reason),
       usage: {
-
-
-      },
-      rawCall: { rawPrompt, rawSettings },
-      rawResponse: {
-        headers: responseHeaders,
-        body: rawResponse
+        inputTokens: response.usage.input_tokens,
+        outputTokens: response.usage.output_tokens
       },
+      request: { body: args },
       response: {
         id: (_a = response.id) != null ? _a : void 0,
-        modelId: (_b = response.model) != null ? _b : void 0
+        modelId: (_b = response.model) != null ? _b : void 0,
+        headers: responseHeaders,
+        body: rawResponse
       },
       warnings,
       providerMetadata: {
@@ -683,8 +686,7 @@ var AnthropicMessagesLanguageModel = class {
         cacheCreationInputTokens: (_c = response.usage.cache_creation_input_tokens) != null ? _c : null,
         cacheReadInputTokens: (_d = response.usage.cache_read_input_tokens) != null ? _d : null
       }
-      }
-      request: { body: JSON.stringify(args) }
+      }
     };
   }
   async doStream(options) {
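
`doGenerate` no longer returns separate `text`/`reasoning`/`toolCalls` fields; it assembles a single ordered `content` array and reports usage as `inputTokens`/`outputTokens`. An illustrative type for the elements pushed in the hunk above (derived from the `content.push(...)` calls; not the exported `@ai-sdk/provider` types):

```ts
// Illustrative only, derived from the content.push(...) calls above.
type GeneratedPart =
  | { type: "text"; text: string }
  | {
      type: "reasoning";
      text: string;
      providerMetadata?: { anthropic: { signature?: string; redactedData?: string } };
    }
  | {
      type: "tool-call";
      toolCallType: "function";
      toolCallId: string;
      toolName: string;
      args: string; // JSON-stringified tool input
    };
```
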
@@ -701,11 +703,10 @@ var AnthropicMessagesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } = args;
     let finishReason = "unknown";
     const usage = {
-
-
+      inputTokens: void 0,
+      outputTokens: void 0
     };
     const toolCallContentBlocks = {};
     let providerMetadata = void 0;
@@ -713,6 +714,9 @@ var AnthropicMessagesLanguageModel = class {
     return {
       stream: response.pipeThrough(
         new TransformStream({
+          start(controller) {
+            controller.enqueue({ type: "stream-start", warnings });
+          },
           transform(chunk, controller) {
             var _a, _b, _c, _d;
             if (!chunk.success) {
@@ -734,9 +738,15 @@ var AnthropicMessagesLanguageModel = class {
                 }
                 case "redacted_thinking": {
                   controller.enqueue({
-                    type: "
-
+                    type: "reasoning",
+                    text: "",
+                    providerMetadata: {
+                      anthropic: {
+                        redactedData: value.content_block.data
+                      }
+                    }
                   });
+                  controller.enqueue({ type: "reasoning-part-finish" });
                   return;
                 }
                 case "tool_use": {
@@ -775,24 +785,30 @@ var AnthropicMessagesLanguageModel = class {
                 switch (deltaType) {
                   case "text_delta": {
                     controller.enqueue({
-                      type: "text
-
+                      type: "text",
+                      text: value.delta.text
                     });
                     return;
                   }
                   case "thinking_delta": {
                     controller.enqueue({
                       type: "reasoning",
-
+                      text: value.delta.thinking
                     });
                     return;
                   }
                   case "signature_delta": {
                     if (blockType === "thinking") {
                       controller.enqueue({
-                        type: "reasoning
-
+                        type: "reasoning",
+                        text: "",
+                        providerMetadata: {
+                          anthropic: {
+                            signature: value.delta.signature
+                          }
+                        }
                       });
+                      controller.enqueue({ type: "reasoning-part-finish" });
                     }
                     return;
                   }
@@ -817,8 +833,8 @@ var AnthropicMessagesLanguageModel = class {
                 }
               }
               case "message_start": {
-                usage.
-                usage.
+                usage.inputTokens = value.message.usage.input_tokens;
+                usage.outputTokens = value.message.usage.output_tokens;
                 providerMetadata = {
                   anthropic: {
                     cacheCreationInputTokens: (_a = value.message.usage.cache_creation_input_tokens) != null ? _a : null,
@@ -833,7 +849,7 @@ var AnthropicMessagesLanguageModel = class {
                 return;
               }
               case "message_delta": {
-                usage.
+                usage.outputTokens = value.usage.output_tokens;
                 finishReason = mapAnthropicStopReason(value.delta.stop_reason);
                 return;
               }
@@ -858,10 +874,8 @@ var AnthropicMessagesLanguageModel = class {
           }
         })
       ),
-
-
-      warnings,
-      request: { body: JSON.stringify(body) }
+      request: { body },
+      response: { headers: responseHeaders }
     };
   }
 };
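
On the streaming side, the transform now opens with a `stream-start` part carrying the warnings, emits plain `text` and `reasoning` parts, and closes thinking blocks with `reasoning-part-finish`; redacted data and signatures travel in `providerMetadata.anthropic`. A rough union of the parts enqueued in the hunks above (names only; the canonical definitions live in `@ai-sdk/provider`):

```ts
// Rough sketch of the chunks enqueued by the TransformStream above.
type StreamPartSketch =
  | { type: "stream-start"; warnings: unknown[] }
  | { type: "text"; text: string }
  | {
      type: "reasoning";
      text: string;
      providerMetadata?: { anthropic: { signature?: string; redactedData?: string } };
    }
  | { type: "reasoning-part-finish" };
```
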
@@ -988,6 +1002,10 @@ var anthropicProviderOptionsSchema = z2.object({
     budgetTokens: z2.number().optional()
   }).optional()
 });
+var anthropicReasoningMetadataSchema = z2.object({
+  signature: z2.string().optional(),
+  redactedData: z2.string().optional()
+});
 
 // src/anthropic-tools.ts
 import { z as z3 } from "zod";