@ai-sdk/anthropic 2.0.22 → 2.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +14 -6
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +14 -6
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +13 -5
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +13 -5
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs (CHANGED)
@@ -1183,7 +1183,7 @@ var AnthropicMessagesLanguageModel = class {
   });
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f;
     const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
     const citationDocuments = this.extractCitationDocuments(options.prompt);
     const {
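The extra `_f` temporary is the down-leveled form of a nullish-coalescing expression used in the next hunk: esbuild rewrites `??` into a temp-variable ternary. A sketch of the source-level equivalent, assuming `response` is the parsed Anthropic Messages response:

```ts
// Equivalent of `(_f = response.stop_sequence) != null ? _f : null` (sketch):
const stopSequence = response.stop_sequence ?? null;
```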
@@ -1410,7 +1410,8 @@ var AnthropicMessagesLanguageModel = class {
       providerMetadata: {
         anthropic: {
           usage: response.usage,
-          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null
+          cacheCreationInputTokens: (_e = response.usage.cache_creation_input_tokens) != null ? _e : null,
+          stopSequence: (_f = response.stop_sequence) != null ? _f : null
         }
       }
     };
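With this change, non-streaming calls expose the matched stop sequence under `providerMetadata.anthropic.stopSequence`. A minimal consumer sketch, assuming the AI SDK v5 `generateText` API (model id and prompt are illustrative):

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const result = await generateText({
  model: anthropic('claude-3-5-haiku-latest'), // illustrative model id
  prompt: 'Count upward and stop before four: 1, 2, 3,',
  stopSequences: ['4'],
});

// The stop sequence that ended generation, or null if none matched.
console.log(result.providerMetadata?.anthropic?.stopSequence);
```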
@@ -1439,6 +1440,7 @@ var AnthropicMessagesLanguageModel = class {
     const contentBlocks = {};
     let rawUsage = void 0;
     let cacheCreationInputTokens = null;
+    let stopSequence = null;
     let blockType = void 0;
     const generateId2 = this.generateId;
     return {
@@ -1448,7 +1450,7 @@ var AnthropicMessagesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g;
+        var _a, _b, _c, _d, _e, _f, _g, _h;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -1786,6 +1788,7 @@ var AnthropicMessagesLanguageModel = class {
             finishReason: value.delta.stop_reason,
             isJsonResponseFromTool: usesJsonResponseTool
           });
+          stopSequence = (_h = value.delta.stop_sequence) != null ? _h : null;
           rawUsage = {
             ...rawUsage,
             ...value.usage
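In the streaming path, the value is captured from the `message_delta` chunk, whose `delta` carries both stop fields (matching the schema change further down). An illustrative chunk shape, with made-up values; only the field names matter here:

```ts
// Illustrative "message_delta" chunk as parsed by this provider (values are
// placeholders, not real API output).
const exampleChunk = {
  type: 'message_delta',
  delta: { stop_reason: 'stop_sequence', stop_sequence: '###' },
  usage: { output_tokens: 23 },
};
```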
@@ -1800,7 +1803,8 @@ var AnthropicMessagesLanguageModel = class {
             providerMetadata: {
               anthropic: {
                 usage: rawUsage != null ? rawUsage : null,
-                cacheCreationInputTokens
+                cacheCreationInputTokens,
+                stopSequence
               }
             }
           });
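The captured value is then emitted with the finish event's provider metadata, so streaming callers can read it once the stream completes. A sketch, assuming the AI SDK v5 `streamText` API (model id and prompt are illustrative):

```ts
import { streamText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const result = streamText({
  model: anthropic('claude-3-5-haiku-latest'), // illustrative model id
  prompt: 'Write until you reach the marker ###',
  stopSequences: ['###'],
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}

// providerMetadata resolves after the stream has finished.
const metadata = await result.providerMetadata;
console.log(metadata?.anthropic?.stopSequence);
```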
@@ -1918,6 +1922,7 @@ var anthropicMessagesResponseSchema = z7.object({
     ])
   ),
   stop_reason: z7.string().nullish(),
+  stop_sequence: z7.string().nullish(),
   usage: z7.looseObject({
     input_tokens: z7.number(),
     output_tokens: z7.number(),
@@ -2066,7 +2071,10 @@ var anthropicMessagesChunkSchema = z7.discriminatedUnion("type", [
   }),
   z7.object({
     type: z7.literal("message_delta"),
-    delta: z7.object({
+    delta: z7.object({
+      stop_reason: z7.string().nullish(),
+      stop_sequence: z7.string().nullish()
+    }),
     usage: z7.looseObject({
       output_tokens: z7.number(),
       cache_creation_input_tokens: z7.number().nullish()
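Both response and chunk schemas parse `stop_sequence` with `.nullish()`, so a string, an explicit `null`, or an absent field are all accepted. A minimal standalone sketch of the new delta shape (uses a plain `zod` import; the real schema in this package also covers usage and the other chunk variants):

```ts
import { z } from 'zod';

// Sketch of the "message_delta" delta shape added in this release.
const messageDeltaSchema = z.object({
  type: z.literal('message_delta'),
  delta: z.object({
    stop_reason: z.string().nullish(),
    stop_sequence: z.string().nullish(),
  }),
});

// All of these pass: present string, explicit null, or missing field.
messageDeltaSchema.parse({
  type: 'message_delta',
  delta: { stop_reason: 'stop_sequence', stop_sequence: '###' },
});
messageDeltaSchema.parse({ type: 'message_delta', delta: { stop_reason: null } });
messageDeltaSchema.parse({ type: 'message_delta', delta: {} });
```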