@clinebot/llms 0.0.7 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.browser.d.ts +2 -2
- package/dist/index.browser.js +40 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +12 -12
- package/dist/providers/handlers/base.d.ts +2 -27
- package/dist/providers/transform/openai-format.d.ts +1 -1
- package/dist/providers/types/config.d.ts +6 -0
- package/package.json +2 -1
- package/src/index.browser.ts +2 -2
- package/src/index.ts +2 -2
- package/src/models/providers/vercel-ai-gateway.ts +1 -1
- package/src/providers/handlers/anthropic-base.ts +14 -3
- package/src/providers/handlers/base.test.ts +60 -1
- package/src/providers/handlers/base.ts +83 -54
- package/src/providers/handlers/bedrock-base.ts +1 -1
- package/src/providers/handlers/gemini-base.test.ts +40 -0
- package/src/providers/handlers/gemini-base.ts +16 -1
- package/src/providers/handlers/openai-base.ts +55 -11
- package/src/providers/handlers/vertex.ts +1 -1
- package/src/providers/transform/format-conversion.test.ts +26 -0
- package/src/providers/transform/openai-format.ts +50 -7
- package/src/providers/types/config.ts +8 -0
|
package/src/providers/transform/format-conversion.test.ts

@@ -216,6 +216,32 @@ describe("format conversion", () => {
     expect(openai[1].tool_calls[0].extra_content).toBeUndefined();
   });
 
+  it("applies OpenAI cache markers only to the final user message", () => {
+    const messages: Message[] = [
+      { role: "user", content: "first prompt" },
+      { role: "assistant", content: "intermediate response" },
+      { role: "user", content: "second prompt" },
+    ];
+
+    const openai = convertToOpenAIMessages(messages, true) as any[];
+    expect(openai[0]).toMatchObject({ role: "user", content: "first prompt" });
+    expect(openai[2].role).toBe("user");
+    expect(openai[2].content).toMatchObject([
+      {
+        type: "text",
+        text: "second prompt",
+        cache_control: { type: "ephemeral" },
+      },
+    ]);
+
+    const cacheMarkerCount = openai
+      .flatMap((message) =>
+        Array.isArray(message.content) ? message.content : [],
+      )
+      .filter((part) => part?.cache_control?.type === "ephemeral").length;
+    expect(cacheMarkerCount).toBe(1);
+  });
+
   it("normalizes array-shaped tool_use input for openai replay", () => {
     const messages: Message[] = [
       { role: "user", content: "run these" },
package/src/providers/transform/openai-format.ts

NOTE(review): the registry viewer truncates old-side lines at the first intraline change; blank "-" lines below reflect that truncation, not empty source lines — confirm against the 0.0.7 tarball.

@@ -26,23 +26,49 @@ type OpenAIContentPart = OpenAI.Chat.ChatCompletionContentPart;
 /**
  * Convert messages to OpenAI format
  */
-export function convertToOpenAIMessages(
-
+export function convertToOpenAIMessages(
+  messages: Message[],
+  enableCaching = false,
+): OpenAIMessage[] {
+  const lastUserIndex = enableCaching
+    ? messages.map((m) => m.role).lastIndexOf("user")
+    : -1;
+  return messages.flatMap((message, index) =>
+    convertMessage(message, enableCaching && index === lastUserIndex),
+  );
 }
 
-function convertMessage(
+function convertMessage(
+  message: Message,
+  addCacheControl: boolean,
+): OpenAIMessage[] {
   const { role, content } = message;
 
   // Simple string content
   if (typeof content === "string") {
-
+    if (role !== "user" || !addCacheControl) {
+      return [{ role, content } as OpenAIMessage];
+    }
+
+    return [
+      {
+        role,
+        content: [
+          {
+            type: "text",
+            text: content,
+            cache_control: { type: "ephemeral" },
+          },
+        ],
+      } as unknown as OpenAIMessage,
+    ];
   }
 
   // Array content - need to process blocks
   if (role === "assistant") {
     return [convertAssistantMessage(content)];
   } else {
-    return convertUserMessage(content);
+    return convertUserMessage(content, addCacheControl);
   }
 }
 
package/src/providers/transform/openai-format.ts

@@ -85,7 +111,10 @@ function convertAssistantMessage(content: ContentBlock[]): OpenAIMessage {
   return message;
 }
 
-function convertUserMessage(
+function convertUserMessage(
+  content: ContentBlock[],
+  addCacheControl: boolean,
+): OpenAIMessage[] {
   const messages: OpenAIMessage[] = [];
 
   // Convert all tool results to separate tool messages
package/src/providers/transform/openai-format.ts

@@ -137,10 +166,24 @@ function convertUserMessage(content: ContentBlock[]): OpenAIMessage[] {
     return messages;
   }
 
+  if (addCacheControl) {
+    for (let i = parts.length - 1; i >= 0; i--) {
+      if (parts[i].type === "text") {
+        parts[i] = {
+          ...(parts[i] as OpenAI.Chat.ChatCompletionContentPartText),
+          cache_control: { type: "ephemeral" },
+        } as unknown as OpenAIContentPart;
+        break;
+      }
+    }
+  }
+
   messages.push({
     role: "user",
     content:
-      parts.length === 1 && parts[0].type === "text"
+      parts.length === 1 && parts[0].type === "text" && !addCacheControl
+        ? parts[0].text
+        : (parts as unknown as OpenAI.Chat.ChatCompletionUserMessageParam["content"]),
   });
 
   return messages;
package/src/providers/types/config.ts

@@ -244,6 +244,11 @@ export interface ProviderOptions {
   modelCatalog?: ModelCatalogConfig;
 }
 
+/**
+ * Provider-specific options that don't fit other categories
+ */
+import type { BasicLogger } from "@clinebot/shared";
+
 /**
  * Runtime model catalog refresh options
  */
package/src/providers/types/config.ts

@@ -299,6 +304,9 @@ export interface ProviderConfig
   /** AbortSignal for cancelling requests */
   abortSignal?: AbortSignal;
 
+  /** Optional runtime logger for provider-level diagnostics */
+  logger?: BasicLogger;
+
   /** Codex CLI-specific options */
   codex?: CodexConfig;
 