@clinebot/llms 0.0.10 → 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +10 -10
- package/dist/providers/handlers/ai-sdk-community.d.ts +1 -1
- package/dist/providers/handlers/base.d.ts +3 -2
- package/dist/providers/types/stream.d.ts +1 -1
- package/package.json +2 -2
- package/src/providers/handlers/ai-sdk-community.ts +5 -8
- package/src/providers/handlers/ai-sdk-provider-base.ts +12 -2
- package/src/providers/handlers/anthropic-base.test.ts +30 -0
- package/src/providers/handlers/anthropic-base.ts +31 -29
- package/src/providers/handlers/base.test.ts +8 -2
- package/src/providers/handlers/base.ts +22 -1
- package/src/providers/handlers/bedrock-base.ts +2 -2
- package/src/providers/handlers/community-sdk.test.ts +33 -0
- package/src/providers/handlers/gemini-base.ts +6 -19
- package/src/providers/handlers/openai-base.ts +19 -8
- package/src/providers/handlers/openai-responses.test.ts +46 -0
- package/src/providers/handlers/openai-responses.ts +3 -7
- package/src/providers/handlers/r1-base.ts +7 -8
- package/src/providers/handlers/vertex.ts +14 -4
- package/src/providers/transform/anthropic-format.ts +14 -2
- package/src/providers/transform/format-conversion.test.ts +23 -0
- package/src/providers/types/stream.ts +1 -1
|
@@ -32,12 +32,24 @@ export function convertToAnthropicMessages(
   messages: Message[],
   enableCaching = false,
 ): AnthropicMessage[] {
+  const userMessageIndices = messages.reduce<number[]>(
+    (indices, message, index) => {
+      if (message.role === "user") {
+        indices.push(index);
+      }
+      return indices;
+    },
+    [],
+  );
+  const cacheableMessageIndices = enableCaching
+    ? new Set(userMessageIndices.slice(-2))
+    : new Set<number>();
   const result: AnthropicMessage[] = [];
 
-  for (const message of messages) {
+  for (const [index, message] of messages.entries()) {
     const converted = convertMessage(
       message,
-
+      cacheableMessageIndices.has(index),
     );
     if (converted) {
       result.push(converted);
@@ -285,10 +285,33 @@ describe("format conversion", () => {
     ];
 
     const anthropic = convertToAnthropicMessages(messages, true) as any[];
+    expect(anthropic[0].content[0].cache_control).toEqual({
+      type: "ephemeral",
+    });
     expect(anthropic[1].content[0].type).toBe("thinking");
     expect(anthropic[1].content[0].signature).toBe("anthropic-sig");
   });
 
+  it("applies anthropic cache markers to the last two user messages", () => {
+    const messages: Message[] = [
+      { role: "user", content: "first prompt" },
+      { role: "assistant", content: "intermediate response" },
+      { role: "user", content: "second prompt" },
+      { role: "assistant", content: "another response" },
+      { role: "user", content: "third prompt" },
+    ];
+
+    const anthropic = convertToAnthropicMessages(messages, true) as any[];
+
+    expect(anthropic[0].content[0].cache_control).toBeUndefined();
+    expect(anthropic[2].content[0].cache_control).toEqual({
+      type: "ephemeral",
+    });
+    expect(anthropic[4].content[0].cache_control).toEqual({
+      type: "ephemeral",
+    });
+  });
+
   it("normalizes array-shaped tool_use input for anthropic replay", () => {
     const messages: Message[] = [
       { role: "user", content: "run these" },
@@ -55,7 +55,7 @@ export interface ApiStreamReasoningChunk {
    */
 export interface ApiStreamUsageChunk {
   type: "usage";
-  /**
+  /** Total number of input tokens reported by the provider */
   inputTokens: number;
   /** Number of output tokens */
   outputTokens: number;