@ai-sdk/anthropic 2.0.0-canary.1 → 2.0.0-canary.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +109 -0
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +190 -170
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +192 -172
- package/dist/index.mjs.map +1 -1
- package/{internal/dist → dist/internal}/index.d.mts +4 -4
- package/{internal/dist → dist/internal}/index.d.ts +4 -4
- package/{internal/dist → dist/internal}/index.js +187 -169
- package/dist/internal/index.js.map +1 -0
- package/{internal/dist → dist/internal}/index.mjs +189 -171
- package/dist/internal/index.mjs.map +1 -0
- package/internal.d.ts +1 -0
- package/package.json +16 -14
- package/internal/dist/index.js.map +0 -1
- package/internal/dist/index.mjs.map +0 -1
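
The most visible API shift in this range (see the `index.mjs` diff below) is that Anthropic reasoning data now travels on ordinary content parts as provider options/metadata validated by a new `anthropicReasoningMetadataSchema`, replacing the dedicated `redacted-reasoning` part type. Below is a minimal TypeScript sketch of that shape, assuming the schema shown in the diff; the `reasoningPart` object and its values are illustrative only, not code from the package:

```ts
import { z } from "zod";

// Mirrors the schema added in this release: `signature` for signed thinking,
// `redactedData` for redacted_thinking blocks.
const anthropicReasoningMetadata = z.object({
  signature: z.string().optional(),
  redactedData: z.string().optional(),
});

// Illustrative reasoning content part as the converter now expects it:
// the metadata rides under providerOptions.anthropic.
const reasoningPart = {
  type: "reasoning" as const,
  text: "…model thinking…",
  providerOptions: {
    anthropic: { signature: "sig_example" }, // assumption: could instead be { redactedData: "…" }
  },
};

const metadata = anthropicReasoningMetadata.parse(
  reasoningPart.providerOptions.anthropic,
);
// A part with a signature is sent as a `thinking` block; redactedData becomes `redacted_thinking`.
console.log(metadata.signature != null ? "thinking" : "redacted_thinking");
```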
package/dist/index.mjs
CHANGED
@@ -15,7 +15,7 @@ import {
   combineHeaders,
   createEventSourceResponseHandler,
   createJsonResponseHandler,
-  parseProviderOptions,
+  parseProviderOptions as parseProviderOptions2,
   postJsonToApi,
   resolve
 } from "@ai-sdk/provider-utils";
@@ -40,13 +40,15 @@ var anthropicFailedResponseHandler = createJsonErrorResponseHandler({
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
-function prepareTools(
-…
-…
+function prepareTools({
+  tools,
+  toolChoice
+}) {
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   const betas = /* @__PURE__ */ new Set();
   if (tools == null) {
-    return { tools: void 0,
+    return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
   }
   const anthropicTools2 = [];
   for (const tool of tools) {
@@ -118,11 +120,10 @@ function prepareTools(mode) {
         break;
     }
   }
-  const toolChoice = mode.toolChoice;
   if (toolChoice == null) {
     return {
       tools: anthropicTools2,
-…
+      toolChoice: void 0,
       toolWarnings,
       betas
     };
@@ -132,30 +133,30 @@ function prepareTools(mode) {
     case "auto":
       return {
         tools: anthropicTools2,
-…
+        toolChoice: { type: "auto" },
         toolWarnings,
         betas
       };
     case "required":
       return {
         tools: anthropicTools2,
-…
+        toolChoice: { type: "any" },
         toolWarnings,
         betas
       };
     case "none":
-      return { tools: void 0,
+      return { tools: void 0, toolChoice: void 0, toolWarnings, betas };
     case "tool":
       return {
         tools: anthropicTools2,
-…
+        toolChoice: { type: "tool", name: toolChoice.toolName },
         toolWarnings,
         betas
       };
     default: {
       const _exhaustiveCheck = type;
       throw new UnsupportedFunctionalityError({
-        functionality: `
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
@@ -165,13 +166,13 @@ function prepareTools(mode) {
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
-import {
-function convertToAnthropicMessagesPrompt({
+import { convertToBase64, parseProviderOptions } from "@ai-sdk/provider-utils";
+async function convertToAnthropicMessagesPrompt({
   prompt,
   sendReasoning,
   warnings
 }) {
-  var _a, _b, _c
+  var _a, _b, _c;
   const betas = /* @__PURE__ */ new Set();
   const blocks = groupIntoBlocks(prompt);
   let system = void 0;
@@ -193,10 +194,10 @@ function convertToAnthropicMessagesPrompt({
             functionality: "Multiple system messages that are separated by user/assistant messages"
           });
         }
-        system = block.messages.map(({ content,
+        system = block.messages.map(({ content, providerOptions }) => ({
           type: "text",
           text: content,
-          cache_control: getCacheControl(
+          cache_control: getCacheControl(providerOptions)
         }));
         break;
       }
@@ -209,7 +210,7 @@ function convertToAnthropicMessagesPrompt({
           for (let j = 0; j < content.length; j++) {
             const part = content[j];
             const isLastPart = j === content.length - 1;
-            const cacheControl = (_a = getCacheControl(part.
+            const cacheControl = (_a = getCacheControl(part.providerOptions)) != null ? _a : isLastPart ? getCacheControl(message.providerOptions) : void 0;
             switch (part.type) {
               case "text": {
                 anthropicContent.push({
@@ -219,42 +220,39 @@ function convertToAnthropicMessagesPrompt({
                 });
                 break;
               }
-              case "image": {
-                anthropicContent.push({
-                  type: "image",
-                  source: part.image instanceof URL ? {
-                    type: "url",
-                    url: part.image.toString()
-                  } : {
-                    type: "base64",
-                    media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
-                    data: convertUint8ArrayToBase64(part.image)
-                  },
-                  cache_control: cacheControl
-                });
-                break;
-              }
               case "file": {
-                if (part.
-…
-…
+                if (part.mediaType.startsWith("image/")) {
+                  anthropicContent.push({
+                    type: "image",
+                    source: part.data instanceof URL ? {
+                      type: "url",
+                      url: part.data.toString()
+                    } : {
+                      type: "base64",
+                      media_type: part.mediaType === "image/*" ? "image/jpeg" : part.mediaType,
+                      data: convertToBase64(part.data)
+                    },
+                    cache_control: cacheControl
                   });
-                }
-…
+                } else if (part.mediaType === "application/pdf") {
+                  betas.add("pdfs-2024-09-25");
+                  anthropicContent.push({
+                    type: "document",
+                    source: part.data instanceof URL ? {
+                      type: "url",
+                      url: part.data.toString()
+                    } : {
+                      type: "base64",
+                      media_type: "application/pdf",
+                      data: convertToBase64(part.data)
+                    },
+                    cache_control: cacheControl
+                  });
+                } else {
                   throw new UnsupportedFunctionalityError2({
-                    functionality:
+                    functionality: `media type: ${part.mediaType}`
                   });
                 }
-                betas.add("pdfs-2024-09-25");
-                anthropicContent.push({
-                  type: "document",
-                  source: {
-                    type: "base64",
-                    media_type: "application/pdf",
-                    data: part.data
-                  },
-                  cache_control: cacheControl
-                });
                 break;
               }
             }
@@ -265,7 +263,7 @@ function convertToAnthropicMessagesPrompt({
           for (let i2 = 0; i2 < content.length; i2++) {
             const part = content[i2];
             const isLastPart = i2 === content.length - 1;
-            const cacheControl = (
+            const cacheControl = (_b = getCacheControl(part.providerOptions)) != null ? _b : isLastPart ? getCacheControl(message.providerOptions) : void 0;
             const toolResultContent = part.content != null ? part.content.map((part2) => {
               var _a2;
               switch (part2.type) {
@@ -280,7 +278,7 @@ function convertToAnthropicMessagesPrompt({
                     type: "image",
                     source: {
                       type: "base64",
-                      media_type: (_a2 = part2.
+                      media_type: (_a2 = part2.mediaType) != null ? _a2 : "image/jpeg",
                       data: part2.data
                     },
                     cache_control: void 0
@@ -315,7 +313,7 @@ function convertToAnthropicMessagesPrompt({
           for (let k = 0; k < content.length; k++) {
             const part = content[k];
             const isLastContentPart = k === content.length - 1;
-            const cacheControl = (
+            const cacheControl = (_c = getCacheControl(part.providerOptions)) != null ? _c : isLastContentPart ? getCacheControl(message.providerOptions) : void 0;
             switch (part.type) {
               case "text": {
                 anthropicContent.push({
@@ -332,12 +330,37 @@ function convertToAnthropicMessagesPrompt({
               }
               case "reasoning": {
                 if (sendReasoning) {
-…
-…
-…
-…
-                    cache_control: cacheControl
+                  const reasoningMetadata = await parseProviderOptions({
+                    provider: "anthropic",
+                    providerOptions: part.providerOptions,
+                    schema: anthropicReasoningMetadataSchema
                   });
+                  if (reasoningMetadata != null) {
+                    if (reasoningMetadata.signature != null) {
+                      anthropicContent.push({
+                        type: "thinking",
+                        thinking: part.text,
+                        signature: reasoningMetadata.signature,
+                        cache_control: cacheControl
+                      });
+                    } else if (reasoningMetadata.redactedData != null) {
+                      anthropicContent.push({
+                        type: "redacted_thinking",
+                        data: reasoningMetadata.redactedData,
+                        cache_control: cacheControl
+                      });
+                    } else {
+                      warnings.push({
+                        type: "other",
+                        message: "unsupported reasoning metadata"
+                      });
+                    }
+                  } else {
+                    warnings.push({
+                      type: "other",
+                      message: "unsupported reasoning metadata"
+                    });
+                  }
                 } else {
                   warnings.push({
                     type: "other",
@@ -346,14 +369,6 @@ function convertToAnthropicMessagesPrompt({
                 }
                 break;
               }
-              case "redacted-reasoning": {
-                anthropicContent.push({
-                  type: "redacted_thinking",
-                  data: part.data,
-                  cache_control: cacheControl
-                });
-                break;
-              }
               case "tool-call": {
                 anthropicContent.push({
                   type: "tool_use",
@@ -372,7 +387,7 @@ function convertToAnthropicMessagesPrompt({
       }
       default: {
         const _exhaustiveCheck = type;
-        throw new Error(`
+        throw new Error(`content type: ${_exhaustiveCheck}`);
       }
     }
   }
@@ -447,21 +462,23 @@ function mapAnthropicStopReason(finishReason) {
 var AnthropicMessagesLanguageModel = class {
   constructor(modelId, settings, config) {
     this.specificationVersion = "v2";
-    this.defaultObjectGenerationMode = "tool";
     this.modelId = modelId;
     this.settings = settings;
     this.config = config;
   }
+  supportsUrl(url) {
+    return url.protocol === "https:";
+  }
   get provider() {
     return this.config.provider;
   }
-…
-…
+  async getSupportedUrls() {
+    var _a, _b, _c;
+    return (_c = (_b = (_a = this.config).getSupportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
   }
   async getArgs({
-    mode,
     prompt,
-…
+    maxOutputTokens = 4096,
     // 4096: max model output tokens TODO update default in v5
     temperature,
     topP,
@@ -471,10 +488,11 @@ var AnthropicMessagesLanguageModel = class {
     stopSequences,
     responseFormat,
     seed,
-…
+    tools,
+    toolChoice,
+    providerOptions
   }) {
     var _a, _b, _c;
-    const type = mode.type;
     const warnings = [];
     if (frequencyPenalty != null) {
       warnings.push({
@@ -501,12 +519,12 @@ var AnthropicMessagesLanguageModel = class {
         details: "JSON response format is not supported."
       });
     }
-    const { prompt: messagesPrompt, betas: messagesBetas } = convertToAnthropicMessagesPrompt({
+    const { prompt: messagesPrompt, betas: messagesBetas } = await convertToAnthropicMessagesPrompt({
       prompt,
       sendReasoning: (_a = this.settings.sendReasoning) != null ? _a : true,
       warnings
     });
-    const anthropicOptions =
+    const anthropicOptions = await parseProviderOptions2({
       provider: "anthropic",
       providerOptions,
       schema: anthropicProviderOptionsSchema
@@ -517,7 +535,7 @@ var AnthropicMessagesLanguageModel = class {
       // model id:
       model: this.modelId,
       // standardized settings:
-      max_tokens:
+      max_tokens: maxOutputTokens,
       temperature,
       top_k: topK,
       top_p: topP,
@@ -560,44 +578,23 @@ var AnthropicMessagesLanguageModel = class {
           details: "topP is not supported when thinking is enabled"
         });
       }
-      baseArgs.max_tokens =
-    }
-    switch (type) {
-      case "regular": {
-        const {
-          tools,
-          tool_choice,
-          toolWarnings,
-          betas: toolsBetas
-        } = prepareTools(mode);
-        return {
-          args: { ...baseArgs, tools, tool_choice },
-          warnings: [...warnings, ...toolWarnings],
-          betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
-        };
-      }
-      case "object-json": {
-        throw new UnsupportedFunctionalityError3({
-          functionality: "json-mode object generation"
-        });
-      }
-      case "object-tool": {
-        const { name, description, parameters } = mode.tool;
-        return {
-          args: {
-            ...baseArgs,
-            tools: [{ name, description, input_schema: parameters }],
-            tool_choice: { type: "tool", name }
-          },
-          warnings,
-          betas: messagesBetas
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
+      baseArgs.max_tokens = maxOutputTokens + thinkingBudget;
     }
+    const {
+      tools: anthropicTools2,
+      toolChoice: anthropicToolChoice,
+      toolWarnings,
+      betas: toolsBetas
+    } = prepareTools({ tools, toolChoice });
+    return {
+      args: {
+        ...baseArgs,
+        tools: anthropicTools2,
+        tool_choice: anthropicToolChoice
+      },
+      warnings: [...warnings, ...toolWarnings],
+      betas: /* @__PURE__ */ new Set([...messagesBetas, ...toolsBetas])
+    };
   }
   async getHeaders({
     betas,
@@ -635,56 +632,62 @@ var AnthropicMessagesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
+    const content = [];
+    for (const part of response.content) {
+      switch (part.type) {
+        case "text": {
+          content.push({ type: "text", text: part.text });
+          break;
+        }
+        case "thinking": {
+          content.push({
+            type: "reasoning",
+            text: part.thinking,
+            providerMetadata: {
+              anthropic: {
+                signature: part.signature
+              }
+            }
+          });
+          break;
+        }
+        case "redacted_thinking": {
+          content.push({
+            type: "reasoning",
+            text: "",
+            providerMetadata: {
+              anthropic: {
+                redactedData: part.data
+              }
+            }
+          });
+          break;
+        }
+        case "tool_use": {
+          content.push({
+            type: "tool-call",
            toolCallType: "function",
-            toolCallId:
-            toolName:
-            args: JSON.stringify(
+            toolCallId: part.id,
+            toolName: part.name,
+            args: JSON.stringify(part.input)
          });
+          break;
        }
      }
    }
-    const reasoning = response.content.filter(
-      (content) => content.type === "redacted_thinking" || content.type === "thinking"
-    ).map(
-      (content) => content.type === "thinking" ? {
-        type: "text",
-        text: content.thinking,
-        signature: content.signature
-      } : {
-        type: "redacted",
-        data: content.data
-      }
-    );
     return {
-…
-      reasoning: reasoning.length > 0 ? reasoning : void 0,
-      toolCalls,
+      content,
       finishReason: mapAnthropicStopReason(response.stop_reason),
       usage: {
-…
-…
-      },
-      rawCall: { rawPrompt, rawSettings },
-      rawResponse: {
-        headers: responseHeaders,
-        body: rawResponse
+        inputTokens: response.usage.input_tokens,
+        outputTokens: response.usage.output_tokens
       },
+      request: { body: args },
       response: {
         id: (_a = response.id) != null ? _a : void 0,
-        modelId: (_b = response.model) != null ? _b : void 0
+        modelId: (_b = response.model) != null ? _b : void 0,
+        headers: responseHeaders,
+        body: rawResponse
       },
       warnings,
       providerMetadata: {
@@ -692,8 +695,7 @@ var AnthropicMessagesLanguageModel = class {
           cacheCreationInputTokens: (_c = response.usage.cache_creation_input_tokens) != null ? _c : null,
           cacheReadInputTokens: (_d = response.usage.cache_read_input_tokens) != null ? _d : null
         }
-      }
-      request: { body: JSON.stringify(args) }
+      }
     };
   }
   async doStream(options) {
@@ -710,11 +712,10 @@ var AnthropicMessagesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } = args;
     let finishReason = "unknown";
     const usage = {
-…
-…
+      inputTokens: void 0,
+      outputTokens: void 0
    };
    const toolCallContentBlocks = {};
    let providerMetadata = void 0;
@@ -722,6 +723,9 @@ var AnthropicMessagesLanguageModel = class {
     return {
       stream: response.pipeThrough(
         new TransformStream({
+          start(controller) {
+            controller.enqueue({ type: "stream-start", warnings });
+          },
           transform(chunk, controller) {
             var _a, _b, _c, _d;
             if (!chunk.success) {
@@ -743,9 +747,15 @@ var AnthropicMessagesLanguageModel = class {
                  }
                  case "redacted_thinking": {
                    controller.enqueue({
-                      type: "
-…
+                      type: "reasoning",
+                      text: "",
+                      providerMetadata: {
+                        anthropic: {
+                          redactedData: value.content_block.data
+                        }
+                      }
                    });
+                    controller.enqueue({ type: "reasoning-part-finish" });
                    return;
                  }
                  case "tool_use": {
@@ -784,24 +794,30 @@ var AnthropicMessagesLanguageModel = class {
                switch (deltaType) {
                  case "text_delta": {
                    controller.enqueue({
-                      type: "text
-…
+                      type: "text",
+                      text: value.delta.text
                    });
                    return;
                  }
                  case "thinking_delta": {
                    controller.enqueue({
                      type: "reasoning",
-…
+                      text: value.delta.thinking
                    });
                    return;
                  }
                  case "signature_delta": {
                    if (blockType === "thinking") {
                      controller.enqueue({
-                        type: "reasoning
-…
+                        type: "reasoning",
+                        text: "",
+                        providerMetadata: {
+                          anthropic: {
+                            signature: value.delta.signature
+                          }
+                        }
                      });
+                      controller.enqueue({ type: "reasoning-part-finish" });
                    }
                    return;
                  }
@@ -826,8 +842,8 @@ var AnthropicMessagesLanguageModel = class {
                  }
                }
                case "message_start": {
-                  usage.
-                  usage.
+                  usage.inputTokens = value.message.usage.input_tokens;
+                  usage.outputTokens = value.message.usage.output_tokens;
                  providerMetadata = {
                    anthropic: {
                      cacheCreationInputTokens: (_a = value.message.usage.cache_creation_input_tokens) != null ? _a : null,
@@ -842,7 +858,7 @@ var AnthropicMessagesLanguageModel = class {
                  return;
                }
                case "message_delta": {
-                  usage.
+                  usage.outputTokens = value.usage.output_tokens;
                  finishReason = mapAnthropicStopReason(value.delta.stop_reason);
                  return;
                }
@@ -867,10 +883,8 @@ var AnthropicMessagesLanguageModel = class {
          }
        })
      ),
-…
-…
-      warnings,
-      request: { body: JSON.stringify(body) }
+      request: { body },
+      response: { headers: responseHeaders }
    };
  }
};
@@ -997,6 +1011,10 @@ var anthropicProviderOptionsSchema = z2.object({
     budgetTokens: z2.number().optional()
   }).optional()
 });
+var anthropicReasoningMetadataSchema = z2.object({
+  signature: z2.string().optional(),
+  redactedData: z2.string().optional()
+});
 
 // src/anthropic-tools.ts
 import { z as z3 } from "zod";
@@ -1163,7 +1181,9 @@ function createAnthropic(options = {}) {
     baseURL,
     headers: getHeaders,
     fetch: options.fetch,
-…
+    getSupportedUrls: async () => ({
+      "image/*": [/^https?:\/\/.*$/]
+    })
   });
   const provider = function(modelId, settings) {
     if (new.target) {