@ai-sdk/anthropic 2.0.52 → 2.0.53
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +29 -50
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +29 -51
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +28 -49
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +28 -50
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
```diff
@@ -11,7 +11,7 @@ import {
 } from "@ai-sdk/provider-utils";
 
 // src/version.ts
-var VERSION = true ? "2.0.52" : "0.0.0-test";
+var VERSION = true ? "2.0.53" : "0.0.0-test";
 
 // src/anthropic-messages-language-model.ts
 import {
```
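The only substantive change in this hunk is the embedded version string (2.0.52 → 2.0.53). The `true ? ... : ...` conditional is what remains after the bundler constant-folds a test-environment guard; in source terms the constant presumably looks something like the sketch below, where the guard flag is an assumption rather than something taken from the package:

```js
// src/version.ts -- hypothetical source-level shape. The real guard has
// already been folded to `true` in the published dist output, so the flag
// used here (NODE_ENV) is only an illustrative assumption.
export const VERSION =
  process.env.NODE_ENV !== "test" ? "2.0.53" : "0.0.0-test";
```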
```diff
@@ -22,7 +22,6 @@ import {
   combineHeaders,
   createEventSourceResponseHandler,
   createJsonResponseHandler,
-  DelayedPromise,
   generateId,
   parseProviderOptions as parseProviderOptions2,
   postJsonToApi,
```
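`DelayedPromise` drops out of the import list because `doStream` no longer parks its return value behind a deferred promise (see the final hunk below). Judging from how the removed code uses it (`isPending()`, `resolve()`, `reject()`), it behaves like an externally settled promise. A minimal sketch of that pattern, for orientation only and not the actual `@ai-sdk/provider-utils` implementation:

```js
// Deferred-promise sketch, inferred from the calls the removed code makes
// (isPending / resolve / reject). Illustrative only.
class DeferredPromise {
  constructor() {
    this.pending = true;
    this.promise = new Promise((resolve, reject) => {
      this.resolveFn = resolve;
      this.rejectFn = reject;
    });
  }
  isPending() {
    return this.pending;
  }
  resolve(value) {
    this.pending = false;
    this.resolveFn(value);
  }
  reject(error) {
    this.pending = false;
    this.rejectFn(error);
  }
}
```

The old `doStream` created one of these up front, settled it from inside the `TransformStream`, and returned its promise; the new code builds and returns the result object directly.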
```diff
@@ -2334,6 +2333,7 @@ var AnthropicMessagesLanguageModel = class {
     };
   }
   async doStream(options) {
+    var _a, _b;
     const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs({
       ...options,
       userSuppliedBetas: await this.getBetasFromHeaders(options.headers)
```
```diff
@@ -2365,25 +2365,13 @@ var AnthropicMessagesLanguageModel = class {
     let container = null;
     let blockType = void 0;
     const generateId3 = this.generateId;
-    let isFirstChunk = true;
-    let stream = void 0;
-    const returnPromise = new DelayedPromise();
     const transformedStream = response.pipeThrough(
       new TransformStream({
         start(controller) {
           controller.enqueue({ type: "stream-start", warnings });
         },
-        async flush() {
-          if (returnPromise.isPending()) {
-            returnPromise.resolve({
-              stream,
-              request: { body },
-              response: { headers: responseHeaders }
-            });
-          }
-        },
         transform(chunk, controller) {
-          var
+          var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j;
           if (options.includeRawChunks) {
             controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
           }
```
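Most of this hunk removes the deferred bookkeeping from the transformer. The deleted `flush()` handler was the fallback that settled the result when the upstream response ended before any chunk arrived: on a `TransformStream`, `flush()` runs once, after the writable side closes and all `transform()` calls have completed. A small standalone illustration of that ordering (plain Web Streams API, not code from this package):

```js
// flush() fires after the writable side closes and every transform() call
// has finished -- the hook the removed code used to settle the deferred
// result for streams that ended without producing a first chunk.
const upper = new TransformStream({
  transform(chunk, controller) {
    controller.enqueue(String(chunk).toUpperCase());
  },
  flush(controller) {
    controller.enqueue("<end>");
  }
});

const writer = upper.writable.getWriter();
await writer.write("hello");
await writer.close();

const reader = upper.readable.getReader();
console.log(await reader.read()); // { value: "HELLO", done: false }
console.log(await reader.read()); // { value: "<end>", done: false }
console.log(await reader.read()); // { value: undefined, done: true }
```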
```diff
@@ -2391,29 +2379,6 @@ var AnthropicMessagesLanguageModel = class {
             controller.enqueue({ type: "error", error: chunk.error });
             return;
           }
-          if (isFirstChunk) {
-            if (chunk.value.type === "error") {
-              returnPromise.reject(
-                new APICallError({
-                  message: chunk.value.error.message,
-                  url,
-                  requestBodyValues: body,
-                  statusCode: chunk.value.error.type === "overloaded_error" ? 529 : 500,
-                  responseHeaders,
-                  responseBody: JSON.stringify(chunk.value.error),
-                  isRetryable: chunk.value.error.type === "overloaded_error"
-                })
-              );
-              controller.terminate();
-              return;
-            }
-            isFirstChunk = false;
-            returnPromise.resolve({
-              stream,
-              request: { body },
-              response: { headers: responseHeaders }
-            });
-          }
           const value = chunk.value;
           switch (value.type) {
             case "ping": {
```
```diff
@@ -2545,11 +2510,11 @@ var AnthropicMessagesLanguageModel = class {
                 toolCallId: part.tool_use_id,
                 toolName: "web_search",
                 result: part.content.map((result) => {
-                  var
+                  var _a3;
                   return {
                     url: result.url,
                     title: result.title,
-                    pageAge: (
+                    pageAge: (_a3 = result.page_age) != null ? _a3 : null,
                     encryptedContent: result.encrypted_content,
                     type: result.type
                   };
```
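The `(_a3 = result.page_age) != null ? _a3 : null` ternary here, and the matching `_a2`/`_b2` rewrites in the next two hunks, are the bundler's down-leveled form of nullish coalescing; the temporaries were renamed because `doStream` now hoists its own `_a, _b` at the top of the method. At the source level the mapping presumably reads like this (a reconstruction, not the package's actual TypeScript):

```js
// Presumed source-level form of the bundled mapping above: `??` is what
// esbuild rewrites into the `(_a3 = expr) != null ? _a3 : fallback` ternary.
// toWebSearchResult is a hypothetical name used only for this sketch.
const toWebSearchResult = (result) => ({
  url: result.url,
  title: result.title,
  pageAge: result.page_age ?? null,
  encryptedContent: result.encrypted_content,
  type: result.type
});

console.log(
  toWebSearchResult({
    url: "https://example.com",
    title: "Example",
    encrypted_content: "...",
    type: "web_search_result"
  }).pageAge
); // null -- page_age was absent
```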
```diff
@@ -2565,7 +2530,7 @@ var AnthropicMessagesLanguageModel = class {
                 title: result.title,
                 providerMetadata: {
                   anthropic: {
-                    pageAge: (
+                    pageAge: (_a2 = result.page_age) != null ? _a2 : null
                   }
                 }
               });
```
```diff
@@ -2768,7 +2733,7 @@ var AnthropicMessagesLanguageModel = class {
             }
             case "message_start": {
               usage.inputTokens = value.message.usage.input_tokens;
-              usage.cachedInputTokens = (
+              usage.cachedInputTokens = (_b2 = value.message.usage.cache_read_input_tokens) != null ? _b2 : void 0;
               rawUsage = {
                 ...value.message.usage
               };
```
```diff
@@ -2832,22 +2797,35 @@ var AnthropicMessagesLanguageModel = class {
       })
     );
     const [streamForFirstChunk, streamForConsumer] = transformedStream.tee();
-    stream = streamForConsumer;
     const firstChunkReader = streamForFirstChunk.getReader();
     try {
-
-
-
+      await firstChunkReader.read();
+      let result = await firstChunkReader.read();
+      if (((_a = result.value) == null ? void 0 : _a.type) === "raw") {
+        result = await firstChunkReader.read();
       }
-
-
-
-
+      if (((_b = result.value) == null ? void 0 : _b.type) === "error") {
+        const error = result.value.error;
+        throw new APICallError({
+          message: error.message,
+          url,
+          requestBodyValues: body,
+          statusCode: error.type === "overloaded_error" ? 529 : 500,
+          responseHeaders,
+          responseBody: JSON.stringify(error),
+          isRetryable: error.type === "overloaded_error"
+        });
       }
     } finally {
+      firstChunkReader.cancel().catch(() => {
+      });
       firstChunkReader.releaseLock();
     }
-    return
+    return {
+      stream: streamForConsumer,
+      request: { body },
+      response: { headers: responseHeaders }
+    };
   }
 };
 function getModelCapabilities(modelId) {
```
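Taken together, the changes swap the deferred-return mechanism for a direct probe of the teed stream: `doStream` reads the first chunk(s) itself, throws an `APICallError` when the stream opens with an Anthropic error event (mapping `overloaded_error` to status 529 and marking it retryable), and then returns the consumer branch. De-sugared from the bundled output above, the new tail of `doStream` reads roughly as follows (a reconstruction that assumes the enclosing method scope for `transformedStream`, `url`, `body`, and `responseHeaders`):

```js
// Approximate source-level reading of the new doStream tail; names follow
// the dist output above, not necessarily the original TypeScript.
const [streamForFirstChunk, streamForConsumer] = transformedStream.tee();
const firstChunkReader = streamForFirstChunk.getReader();
try {
  await firstChunkReader.read();              // consume the "stream-start" part
  let result = await firstChunkReader.read(); // first real part
  if (result.value?.type === "raw") {
    result = await firstChunkReader.read();   // skip the raw echo when enabled
  }
  if (result.value?.type === "error") {
    const error = result.value.error;
    throw new APICallError({
      message: error.message,
      url,
      requestBodyValues: body,
      statusCode: error.type === "overloaded_error" ? 529 : 500,
      responseHeaders,
      responseBody: JSON.stringify(error),
      isRetryable: error.type === "overloaded_error"
    });
  }
} finally {
  firstChunkReader.cancel().catch(() => {});  // discard the probe branch
  firstChunkReader.releaseLock();
}
return {
  stream: streamForConsumer,
  request: { body },
  response: { headers: responseHeaders }
};
```

Cancelling the probe reader in `finally` keeps the unread tee branch from buffering the rest of the response while the consumer branch is read.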