zidane 4.0.2 → 4.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77) hide show
  1. package/README.md +196 -614
  2. package/dist/agent-BoV5Twdl.d.ts +2347 -0
  3. package/dist/agent-BoV5Twdl.d.ts.map +1 -0
  4. package/dist/contexts-3Arvn7yR.js +321 -0
  5. package/dist/contexts-3Arvn7yR.js.map +1 -0
  6. package/dist/contexts.d.ts +2 -25
  7. package/dist/contexts.js +2 -10
  8. package/dist/errors-D1lhd6mX.js +118 -0
  9. package/dist/errors-D1lhd6mX.js.map +1 -0
  10. package/dist/index-28otmfLX.d.ts +400 -0
  11. package/dist/index-28otmfLX.d.ts.map +1 -0
  12. package/dist/index-BfSdALzk.d.ts +113 -0
  13. package/dist/index-BfSdALzk.d.ts.map +1 -0
  14. package/dist/index-DPsd0qwm.d.ts +254 -0
  15. package/dist/index-DPsd0qwm.d.ts.map +1 -0
  16. package/dist/index.d.ts +5 -95
  17. package/dist/index.js +141 -271
  18. package/dist/index.js.map +1 -0
  19. package/dist/interpolate-CukJwP2G.js +887 -0
  20. package/dist/interpolate-CukJwP2G.js.map +1 -0
  21. package/dist/mcp-8wClKY-3.js +771 -0
  22. package/dist/mcp-8wClKY-3.js.map +1 -0
  23. package/dist/mcp.d.ts +2 -4
  24. package/dist/mcp.js +2 -13
  25. package/dist/messages-z5Pq20p7.js +1020 -0
  26. package/dist/messages-z5Pq20p7.js.map +1 -0
  27. package/dist/presets-Cs7_CsMk.js +39 -0
  28. package/dist/presets-Cs7_CsMk.js.map +1 -0
  29. package/dist/presets.d.ts +2 -43
  30. package/dist/presets.js +2 -17
  31. package/dist/providers-CX-R-Oy-.js +969 -0
  32. package/dist/providers-CX-R-Oy-.js.map +1 -0
  33. package/dist/providers.d.ts +2 -4
  34. package/dist/providers.js +3 -23
  35. package/dist/session/sqlite.d.ts +7 -12
  36. package/dist/session/sqlite.d.ts.map +1 -0
  37. package/dist/session/sqlite.js +67 -79
  38. package/dist/session/sqlite.js.map +1 -0
  39. package/dist/session-Cn68UASv.js +440 -0
  40. package/dist/session-Cn68UASv.js.map +1 -0
  41. package/dist/session.d.ts +2 -4
  42. package/dist/session.js +3 -27
  43. package/dist/skills.d.ts +3 -322
  44. package/dist/skills.js +24 -47
  45. package/dist/skills.js.map +1 -0
  46. package/dist/stats-DoKUtF5T.js +58 -0
  47. package/dist/stats-DoKUtF5T.js.map +1 -0
  48. package/dist/tools-DpeWKzP1.js +3941 -0
  49. package/dist/tools-DpeWKzP1.js.map +1 -0
  50. package/dist/tools.d.ts +3 -95
  51. package/dist/tools.js +2 -40
  52. package/dist/tui.d.ts +533 -0
  53. package/dist/tui.d.ts.map +1 -0
  54. package/dist/tui.js +2004 -0
  55. package/dist/tui.js.map +1 -0
  56. package/dist/types-Bx_F8jet.js +39 -0
  57. package/dist/types-Bx_F8jet.js.map +1 -0
  58. package/dist/types.d.ts +4 -55
  59. package/dist/types.js +4 -28
  60. package/package.json +38 -4
  61. package/dist/agent-BAHrGtqu.d.ts +0 -2425
  62. package/dist/chunk-4ILGBQ23.js +0 -803
  63. package/dist/chunk-4LPBN547.js +0 -3540
  64. package/dist/chunk-64LLNY7F.js +0 -28
  65. package/dist/chunk-6STZTA4N.js +0 -830
  66. package/dist/chunk-7GQ7P6DM.js +0 -566
  67. package/dist/chunk-IC7FT4OD.js +0 -37
  68. package/dist/chunk-JCOB6IYO.js +0 -22
  69. package/dist/chunk-JH6IAAFA.js +0 -28
  70. package/dist/chunk-LNN5UTS2.js +0 -97
  71. package/dist/chunk-PMCQOMV4.js +0 -490
  72. package/dist/chunk-UD25QF3H.js +0 -304
  73. package/dist/chunk-W57VY6DJ.js +0 -834
  74. package/dist/sandbox-D7v6Wy62.d.ts +0 -28
  75. package/dist/skills-use-DwZrNmcw.d.ts +0 -80
  76. package/dist/types-Bai5rKpa.d.ts +0 -89
  77. package/dist/validation-Pm--dQEU.d.ts +0 -185
@@ -1,803 +0,0 @@
1
- import {
2
- matchesContextExceeded
3
- } from "./chunk-LNN5UTS2.js";
4
-
5
// src/providers/openai-compat.ts
// Sentinel tags used by toOpenAI/fromOpenAI (below) to round-trip tool
// calls and tool results through storage losslessly.
var TOOL_RESULTS_TAG = "__zidane_tool_results__";
var ASSISTANT_TOOL_CALLS_TAG = "__zidane_assistant_tc__";
// Hard cap (8 MiB) on buffer growth when no line boundary arrives — guards
// against an upstream that streams non-SSE data.
var SSE_MAX_BUFFER_BYTES = 8 * 1024 * 1024;
// Raised for malformed stream data (oversized buffer, truncated tool-call
// arguments), as opposed to an HTTP-level failure.
var OpenAICompatStreamError = class extends Error {
  constructor(message) {
    super(message);
    this.name = "OpenAICompatStreamError";
  }
};
/**
 * Consume an OpenAI-compatible SSE response stream, forwarding text /
 * thinking deltas to `callbacks` (onText required, onThinking optional)
 * and accumulating the final { text, thinking, toolCalls, finishReason,
 * usage, reasoningDetails } result.
 *
 * BUGFIX vs. previous revision: the read loop left any final `data:` line
 * that arrived without a trailing newline sitting in `buffer`, and never
 * flushed the TextDecoder — that trailing payload was silently dropped.
 * The residual buffer (plus decoder flush) is now processed after the loop.
 */
async function consumeSSE(response, callbacks, signal) {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  let text = "";
  let thinking = "";
  let finishReason = "stop";
  let usage = { input: 0, output: 0 };
  const tcMap = /* @__PURE__ */ new Map();
  const reasoningMap = /* @__PURE__ */ new Map();
  let sawReasoningDetails = false;
  // Parse one "data: {...}" SSE line and fold it into the accumulators.
  const handleLine = (line) => {
    if (!line.startsWith("data: "))
      return;
    const data = line.slice(6).trim();
    if (data === "[DONE]")
      return;
    let chunk;
    try {
      chunk = JSON.parse(data);
    } catch {
      return; // tolerate keep-alive / junk lines
    }
    const choice = chunk.choices?.[0];
    if (!choice)
      return;
    if (choice.finish_reason)
      finishReason = choice.finish_reason;
    const delta = choice.delta;
    // OpenRouter-style structured reasoning deltas, keyed by index.
    const reasoningDeltaArr = delta?.reasoning_details;
    if (reasoningDeltaArr && reasoningDeltaArr.length > 0) {
      sawReasoningDetails = true;
      for (const item of reasoningDeltaArr) {
        const idx = typeof item.index === "number" ? item.index : 0;
        const existing = reasoningMap.get(idx) ?? {};
        if (typeof item.text === "string") {
          existing.text = (existing.text ?? "") + item.text;
          thinking += item.text;
          callbacks.onThinking?.(item.text);
        }
        if (typeof item.summary === "string") {
          existing.summary = (existing.summary ?? "") + item.summary;
          thinking += item.summary;
          callbacks.onThinking?.(item.summary);
        }
        // Scalar metadata fields: last write wins.
        for (const key of ["type", "signature", "data", "format", "id"]) {
          const v = item[key];
          if (typeof v === "string")
            existing[key] = v;
        }
        reasoningMap.set(idx, existing);
      }
    }
    // Fallback plain-text reasoning fields, only while no structured
    // reasoning has been seen (avoids double-counting).
    if (!sawReasoningDetails) {
      const thinkingDelta = delta?.reasoning_content ?? delta?.reasoning;
      if (thinkingDelta) {
        thinking += thinkingDelta;
        callbacks.onThinking?.(thinkingDelta);
      }
    }
    const contentDelta = delta?.content;
    if (contentDelta) {
      text += contentDelta;
      callbacks.onText(contentDelta);
    }
    // Tool-call deltas: first chunk carries id/name, later ones append args.
    const toolCallsDelta = delta?.tool_calls;
    if (toolCallsDelta) {
      for (const tc of toolCallsDelta) {
        const existing = tcMap.get(tc.index);
        if (existing) {
          if (tc.function?.arguments)
            existing.args += tc.function.arguments;
        } else {
          tcMap.set(tc.index, {
            id: tc.id || `call_${tc.index}`,
            name: tc.function?.name || "",
            args: tc.function?.arguments || ""
          });
        }
      }
    }
    const chunkUsage = chunk.usage;
    if (chunkUsage) {
      const cachedRead = chunkUsage.prompt_tokens_details?.cached_tokens;
      const cachedWrite = chunkUsage.prompt_tokens_details?.cache_creation_input_tokens ?? chunkUsage.prompt_tokens_details?.cache_write_tokens ?? chunkUsage.cache_creation_input_tokens;
      usage = {
        input: chunkUsage.prompt_tokens ?? 0,
        output: chunkUsage.completion_tokens ?? 0,
        cost: chunkUsage.total_cost ?? void 0,
        ...typeof cachedRead === "number" && cachedRead > 0 ? { cacheRead: cachedRead } : {},
        ...typeof cachedWrite === "number" && cachedWrite > 0 ? { cacheCreation: cachedWrite } : {}
      };
    }
  };
  try {
    while (true) {
      if (signal?.aborted)
        break;
      const { done, value } = await reader.read();
      if (done)
        break;
      buffer += decoder.decode(value, { stream: true });
      if (buffer.length > SSE_MAX_BUFFER_BYTES) {
        throw new OpenAICompatStreamError(
          `SSE buffer exceeded ${SSE_MAX_BUFFER_BYTES} bytes without a line boundary \u2014 upstream may be streaming non-SSE data.`
        );
      }
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines)
        handleLine(line);
    }
    // BUGFIX: flush the decoder and process any final line that ended
    // without a trailing newline — previously this data was lost.
    buffer += decoder.decode();
    if (buffer.length > 0)
      handleLine(buffer);
  } finally {
    reader.releaseLock();
  }
  const toolCalls = [];
  for (const tc of tcMap.values()) {
    if (!tc.args) {
      toolCalls.push({ id: tc.id, name: tc.name, input: {} });
      continue;
    }
    try {
      toolCalls.push({ id: tc.id, name: tc.name, input: JSON.parse(tc.args) });
    } catch (err) {
      throw new OpenAICompatStreamError(
        `Tool call "${tc.name}" (${tc.id}) arguments were truncated or malformed: ${err.message}`
      );
    }
  }
  // Re-emit reasoning items in index order.
  const reasoningDetails = Array.from(reasoningMap.entries()).sort(([a], [b]) => a - b).map(([, item]) => item);
  return { text, thinking, toolCalls, finishReason, usage, reasoningDetails };
}
147
/** Wrap a base64 image as an OpenAI `image_url` content part (data URL). */
function toImageUrlPart(img) {
  const url = `data:${img.mediaType};base64,${img.data}`;
  return { type: "image_url", image_url: { url } };
}
153
/**
 * Split a tool-result output into its joined text and its image blocks.
 * A plain string output becomes { text, images: [] }.
 */
function summarizeToolResultOutput(output) {
  if (typeof output === "string")
    return { text: output, images: [] };
  const text = output
    .filter((block) => block.type === "text")
    .map((block) => block.text)
    .join("\n");
  const images = output
    .filter((block) => block.type === "image")
    .map(({ mediaType, data }) => ({ mediaType, data }));
  return { text, images };
}
166
/**
 * Flatten internal messages into the OpenAI chat-completions `messages`
 * array, prefixed with the system message.
 *
 * options:
 *   imageInToolResult — provider accepts image parts inside `tool` messages;
 *     otherwise images are hoisted into a synthetic follow-up user message.
 *   supportsReasoning — replay stored OpenRouter reasoning_details.
 *   model — only reasoning tagged with this model id is replayed.
 */
function toOAIMessages(system, messages, options = {}) {
  const out = [{ role: "system", content: system }];
  const nativeImageInTool = options.imageInToolResult === true;
  const reasoningEnabled = options.supportsReasoning === true;
  const activeModel = options.model;
  for (const msg of messages) {
    const toolResults = msg.content.filter((b) => b.type === "tool_result");
    const toolCalls = msg.content.filter((b) => b.type === "tool_call");
    const textBlocks = msg.content.filter((b) => b.type === "text");
    const imageBlocks = msg.content.filter((b) => b.type === "image");
    // Only replay reasoning produced by OpenRouter, and (when both sides
    // are tagged) only for the currently active model.
    const reasoningBlocks = reasoningEnabled ? msg.content.filter((b) => {
      if (b.type !== "provider_reasoning")
        return false;
      if (b.producer !== "openrouter")
        return false;
      if (b.model && activeModel && b.model !== activeModel)
        return false;
      return true;
    }) : [];
    const reasoningDetails = reasoningBlocks.flatMap((b) => b.details);
    if (toolResults.length > 0) {
      for (const tr of toolResults) {
        if (typeof tr.output === "string") {
          out.push({ role: "tool", tool_call_id: tr.callId, content: tr.output });
          continue;
        }
        if (nativeImageInTool) {
          // Provider supports image parts directly inside the tool message.
          const parts = tr.output.map((block) => block.type === "image" ? toImageUrlPart({ mediaType: block.mediaType, data: block.data }) : { type: "text", text: block.text });
          out.push({ role: "tool", tool_call_id: tr.callId, content: parts });
          continue;
        }
        const { text, images } = summarizeToolResultOutput(tr.output);
        if (images.length === 0) {
          out.push({ role: "tool", tool_call_id: tr.callId, content: text });
          continue;
        }
        // Provider can't take images in tool messages: emit the text with a
        // marker, then attach the images in a synthetic user message.
        const noun = images.length === 1 ? "image" : "images";
        const attachedMarker = `[${images.length} ${noun} attached \u2014 see next user message]`;
        const toolMarker = text.length > 0 ? `${text}

${attachedMarker}` : attachedMarker;
        out.push({ role: "tool", tool_call_id: tr.callId, content: toolMarker });
        out.push({
          role: "user",
          content: [
            ...images.map(toImageUrlPart),
            { type: "text", text: `(${noun} returned by tool call ${tr.callId})` }
          ]
        });
      }
      continue;
    }
    if (toolCalls.length > 0) {
      // Assistant turn that invoked tools; optional leading text.
      const textContent = textBlocks.length > 0 ? textBlocks[0].text : null;
      const m = {
        role: "assistant",
        content: textContent,
        tool_calls: toolCalls.map((tc) => ({
          id: tc.id,
          type: "function",
          function: { name: tc.name, arguments: JSON.stringify(tc.input) }
        }))
      };
      if (reasoningDetails.length > 0)
        m.reasoning_details = reasoningDetails;
      out.push(m);
      continue;
    }
    if (imageBlocks.length > 0) {
      // Mixed image/text message: images first, then text parts.
      const parts = imageBlocks.map((img) => ({
        type: "image_url",
        image_url: { url: `data:${img.mediaType};base64,${img.data}` }
      }));
      for (const b of textBlocks) {
        parts.push({ type: "text", text: b.text });
      }
      const m = { role: msg.role, content: parts };
      if (msg.role === "assistant" && reasoningDetails.length > 0)
        m.reasoning_details = reasoningDetails;
      out.push(m);
      continue;
    }
    // Text-only (or empty) message: collapse a single text block to a string.
    let pushed;
    if (textBlocks.length === 1) {
      pushed = { role: msg.role, content: textBlocks[0].text };
    } else if (textBlocks.length > 1) {
      pushed = { role: msg.role, content: textBlocks.map((b) => ({ type: "text", text: b.text })) };
    } else {
      pushed = { role: msg.role, content: null };
    }
    if (msg.role === "assistant" && reasoningDetails.length > 0)
      pushed.reasoning_details = reasoningDetails;
    out.push(pushed);
  }
  return out;
}
262
// Anthropic-style prompt-caching directive understood by OpenRouter.
var EPHEMERAL = { type: "ephemeral" };
/**
 * Mark cache breakpoints on the leading system message (if any) and the
 * final message, so the prefix up to each marker can be cached.
 * Mutates `messages` in place.
 */
function applyOAICacheBreakpoints(messages) {
  if (messages.length === 0)
    return;
  const [head] = messages;
  if (head.role === "system")
    markLastContentPart(head);
  const tail = messages.length - 1;
  if (tail > 0)
    markLastContentPart(messages[tail]);
}
/** Attach `cache_control` to the last content part of one message (in place). */
function markLastContentPart(msg) {
  const { content } = msg;
  if (typeof content === "string") {
    if (content.length === 0)
      return;
    // Promote a string body to a one-part array so it can carry cache_control.
    msg.content = [{ type: "text", text: content, cache_control: EPHEMERAL }];
    return;
  }
  if (!Array.isArray(content) || content.length === 0)
    return;
  const last = content.length - 1;
  content[last] = { ...content[last], cache_control: EPHEMERAL };
}
/** Return a copy of `tools` with `cache_control` on the final entry. */
function applyOAIToolCacheBreakpoint(tools) {
  if (tools.length === 0)
    return tools;
  const last = tools.length - 1;
  return tools.map((tool, i) => i === last ? { ...tool, cache_control: EPHEMERAL } : tool);
}
294
/** Convert internal tool specs to the OpenAI `function`-tool wire format. */
function formatTools(tools) {
  return tools.map(({ name, description, inputSchema }) => ({
    type: "function",
    function: { name, description, parameters: inputSchema }
  }));
}
300
/** Build a plain-text user message. */
function userMessage(content) {
  const block = { type: "text", text: content };
  return { role: "user", content: [block] };
}
/** Build a plain-text assistant message. */
function assistantMessage(content) {
  const block = { type: "text", text: content };
  return { role: "assistant", content: [block] };
}
/** Build a user message carrying one tool_result block per result. */
function toolResultsMessage(results) {
  const blocks = results.map((r) => ({
    type: "tool_result",
    callId: r.id,
    output: r.content
  }));
  return { role: "user", content: blocks };
}
316
/**
 * Assemble an assistant message from streamed pieces, in canonical block
 * order: provider_reasoning, thinking, text, then tool_call blocks.
 * Empty pieces are omitted.
 */
function buildAssistantContent(text, toolCalls, thinking, reasoning) {
  const content = [];
  if (reasoning && reasoning.details.length > 0) {
    content.push({
      type: "provider_reasoning",
      producer: reasoning.producer,
      details: reasoning.details,
      ...reasoning.model ? { model: reasoning.model } : {}
    });
  }
  if (thinking)
    content.push({ type: "thinking", text: thinking });
  if (text)
    content.push({ type: "text", text });
  content.push(...toolCalls.map(({ id, name, input }) => ({ type: "tool_call", id, name, input })));
  return { role: "assistant", content };
}
337
// HTTP-level failure from a chat-completions endpoint. When the body is a
// JSON error envelope, the provider's message and code are extracted;
// otherwise the raw body is reported verbatim.
var OpenAICompatHttpError = class extends Error {
  status;
  providerCode;
  bodyText;
  constructor(status, bodyText) {
    let detail = bodyText;
    let extractedCode;
    try {
      const parsed = JSON.parse(bodyText);
      detail = parsed?.error?.message ?? bodyText;
      extractedCode = parsed?.error?.code ?? parsed?.error?.type;
    } catch {
      // Non-JSON body — keep it as-is.
    }
    super(`HTTP ${status}: ${detail}`);
    this.name = "OpenAICompatHttpError";
    this.status = status;
    this.providerCode = extractedCode;
    this.bodyText = bodyText;
  }
};
357
// Matches one trailing slash, for baseURL normalization in openaiCompat.
var TRAILING_SLASH_RE = /\/$/;
/**
 * Classify a caught error into the provider-agnostic taxonomy:
 * { kind: "aborted" | "context_exceeded" | "provider_error", ... }.
 * Returns null for errors this provider does not recognize.
 */
function classifyOpenAICompatError(err) {
  if (!err || typeof err !== "object")
    return null;
  if (err.name === "AbortError")
    return { kind: "aborted" };
  // Malformed stream data is retryable — the next attempt may stream cleanly.
  if (err instanceof OpenAICompatStreamError) {
    return {
      kind: "provider_error",
      providerCode: "stream_error",
      message: err.message,
      retryable: true
    };
  }
  if (!(err instanceof OpenAICompatHttpError))
    return null;
  const code = err.providerCode;
  const msg = err.message;
  // Context overflow: detected via the explicit provider code or by
  // message pattern (matchesContextExceeded, shared helper).
  if (code === "context_length_exceeded" || matchesContextExceeded(msg)) {
    return {
      kind: "context_exceeded",
      providerCode: code ?? "context_length_exceeded",
      message: msg
    };
  }
  return {
    kind: "provider_error",
    providerCode: code ?? String(err.status),
    message: msg,
    retryable: isRetryableHttpStatus(err.status)
  };
}
389
/** 429 and all 5xx statuses except 501 (Not Implemented) are retryable. */
function isRetryableHttpStatus(status) {
  return status === 429 || (status >= 500 && status !== 501);
}
396
/**
 * Map an OpenAI finish_reason onto the internal vocabulary.
 * Falsy input maps to undefined; unrecognized reasons to "other".
 */
function mapOAIFinishReason(reason) {
  if (!reason)
    return void 0;
  const table = {
    stop: "stop",
    tool_calls: "tool-calls",
    function_call: "tool-calls",
    length: "length",
    content_filter: "content-filter"
  };
  // hasOwn guard keeps inherited keys ("constructor", ...) mapping to "other".
  return Object.hasOwn(table, reason) ? table[reason] : "other";
}
413
/**
 * Build the OpenRouter `reasoning` request object from a thinking level
 * and an optional token budget. Returns undefined when reasoning is off
 * and no budget is given. "adaptive" sets no effort; "minimal" maps to "low".
 */
function planOpenRouterReasoning(thinking, thinkingBudget) {
  const hasBudget = typeof thinkingBudget === "number";
  if ((!thinking || thinking === "off") && !hasBudget)
    return void 0;
  const plan = {};
  if (thinking && thinking !== "off" && thinking !== "adaptive")
    plan.effort = thinking === "minimal" ? "low" : thinking;
  if (hasBudget && thinkingBudget > 0)
    plan.max_tokens = thinkingBudget;
  return plan;
}
425
/**
 * Build a provider for any OpenAI-compatible chat-completions endpoint.
 *
 * params: baseURL, apiKey, plus optional name, defaultModel,
 * authHeader ({ name, scheme }), extraHeaders, extraBodyParams,
 * capabilities ({ vision, imageInToolResult }), cacheBreakpoints,
 * supportsReasoning.
 */
function openaiCompat(params) {
  const name = params.name ?? "openai-compat";
  const defaultModel = params.defaultModel ?? "gpt-4o-mini";
  // Auth header resolution: explicit scheme > raw value on a custom
  // header > standard "Authorization: Bearer <key>".
  const authHeaderName = params.authHeader?.name ?? "Authorization";
  const authHeaderValue = params.authHeader?.scheme ? `${params.authHeader.scheme} ${params.apiKey}` : params.authHeader ? params.apiKey : `Bearer ${params.apiKey}`;
  const extraHeaders = params.extraHeaders ?? {};
  const endpoint = `${params.baseURL.replace(TRAILING_SLASH_RE, "")}/chat/completions`;
  const capabilities = {
    vision: params.capabilities?.vision ?? false,
    imageInToolResult: params.capabilities?.imageInToolResult ?? false
  };
  const cacheBreakpointsEnabled = params.cacheBreakpoints === true;
  const reasoningEnabled = params.supportsReasoning === true;
  return {
    name,
    meta: { defaultModel, capabilities },
    formatTools,
    userMessage,
    assistantMessage,
    toolResultsMessage,
    classifyError: classifyOpenAICompatError,
    // Stream one completion; resolves with the assembled assistant turn.
    async stream(options, callbacks) {
      const modelId = options.model || defaultModel;
      const messages = toOAIMessages(options.system, options.messages, {
        imageInToolResult: capabilities.imageInToolResult === true,
        supportsReasoning: reasoningEnabled,
        model: modelId
      });
      // Caching is opt-in per provider and can be disabled per request.
      const shouldCache = cacheBreakpointsEnabled && options.cache !== false;
      if (shouldCache) {
        applyOAICacheBreakpoints(messages);
      }
      // Reserve the thinking budget on top of the visible-output budget.
      const maxTokens = options.thinkingBudget ? options.thinkingBudget + options.maxTokens : options.maxTokens;
      const body = {
        // Spread first so the typed core below wins on collision — explicit
        // always overrides generic.
        ...params.extraBodyParams ?? {},
        model: modelId,
        messages,
        max_tokens: maxTokens,
        stream: true
      };
      if (reasoningEnabled) {
        const reasoning = planOpenRouterReasoning(options.thinking, options.thinkingBudget);
        if (reasoning)
          body.reasoning = reasoning;
      }
      if (options.tools && options.tools.length > 0) {
        body.tools = shouldCache ? applyOAIToolCacheBreakpoint(options.tools) : options.tools;
      }
      if (options.toolChoice) {
        if (options.toolChoice.type === "tool" && options.toolChoice.name)
          body.tool_choice = { type: "function", function: { name: options.toolChoice.name } };
        else if (options.toolChoice.type === "required")
          body.tool_choice = "required";
        else
          body.tool_choice = "auto";
      }
      const response = await fetch(endpoint, {
        method: "POST",
        headers: {
          [authHeaderName]: authHeaderValue,
          "Content-Type": "application/json",
          ...extraHeaders
        },
        body: JSON.stringify(body),
        signal: options.signal
      });
      if (!response.ok) {
        const errorText = await response.text();
        throw new OpenAICompatHttpError(response.status, errorText);
      }
      const result = await consumeSSE(response, callbacks, options.signal);
      const finishReason = mapOAIFinishReason(result.finishReason);
      return {
        assistantMessage: buildAssistantContent(
          result.text,
          result.toolCalls,
          result.thinking,
          reasoningEnabled && result.reasoningDetails.length > 0 ? { details: result.reasoningDetails, producer: "openrouter", model: modelId } : void 0
        ),
        text: result.text,
        toolCalls: result.toolCalls,
        // A turn is done when the model stopped or requested no tools.
        done: result.finishReason === "stop" || result.toolCalls.length === 0,
        usage: {
          input: result.usage.input,
          output: result.usage.output,
          ...result.usage.cacheRead !== void 0 ? { cacheRead: result.usage.cacheRead } : {},
          ...result.usage.cacheCreation !== void 0 ? { cacheCreation: result.usage.cacheCreation } : {},
          ...result.usage.cost !== void 0 ? { cost: result.usage.cost } : {},
          ...finishReason ? { finishReason } : {},
          modelId
        }
      };
    }
  };
}
522
-
523
// src/session/messages.ts
/**
 * Decode Anthropic tool_result `content` into the internal output shape:
 * a plain string when everything is text, otherwise an array of
 * text/image blocks. Unrecognizable entries are stringified as text.
 */
function decodeAnthropicToolResultContent(content) {
  if (typeof content === "string")
    return content;
  if (!Array.isArray(content))
    return JSON.stringify(content);
  const blocks = [];
  for (const raw of content) {
    if (!raw || typeof raw !== "object")
      continue;
    const entry = raw;
    const src = entry.source;
    if (entry.type === "text" && typeof entry.text === "string") {
      blocks.push({ type: "text", text: entry.text });
    } else if (entry.type === "image" && src && typeof src === "object" && src.type === "base64" && typeof src.data === "string" && typeof src.media_type === "string") {
      blocks.push({ type: "image", mediaType: src.media_type, data: src.data });
    } else {
      blocks.push({ type: "text", text: JSON.stringify(raw) });
    }
  }
  if (blocks.length === 0)
    return "";
  const textOnly = blocks.every((b) => b.type === "text");
  return textOnly ? blocks.map((b) => b.text).join("\n") : blocks;
}
554
/** Encode internal tool output back into Anthropic tool_result content. */
function encodeAnthropicToolResultContent(output) {
  if (typeof output === "string")
    return output;
  return output.map((block) => block.type === "text"
    ? { type: "text", text: block.text }
    : { type: "image", source: { type: "base64", media_type: block.mediaType, data: block.data } });
}
566
/**
 * Convert an Anthropic-shaped message (string or content-block array)
 * into the internal message format. Unknown block types are dropped.
 */
function fromAnthropic(msg) {
  const out = { role: msg.role, content: [] };
  if (typeof msg.content === "string") {
    out.content.push({ type: "text", text: msg.content });
    return out;
  }
  if (!Array.isArray(msg.content))
    return out;
  for (const raw of msg.content) {
    if (!raw || typeof raw !== "object")
      continue;
    const b = raw;
    switch (b.type) {
      case "text":
        out.content.push({ type: "text", text: b.text });
        break;
      case "image":
        // Only base64 sources are representable internally.
        if (b.source?.type === "base64")
          out.content.push({ type: "image", mediaType: b.source.media_type, data: b.source.data });
        break;
      case "tool_use":
        out.content.push({ type: "tool_call", id: b.id, name: b.name, input: b.input });
        break;
      case "tool_result": {
        const result = {
          type: "tool_result",
          callId: b.tool_use_id,
          output: decodeAnthropicToolResultContent(b.content)
        };
        if (b.is_error === true)
          result.isError = true;
        out.content.push(result);
        break;
      }
      case "thinking": {
        const block = { type: "thinking", text: b.thinking ?? "" };
        if (typeof b.signature === "string") {
          block.signature = b.signature;
          block.signatureProducer = "anthropic";
        }
        out.content.push(block);
        break;
      }
      case "redacted_thinking":
        out.content.push({ type: "redacted_thinking", data: b.data ?? "" });
        break;
      default:
        break;
    }
  }
  return out;
}
614
/**
 * Convert an OpenAI-shaped message back to the internal format, including
 * the sentinel-tagged wrappers produced by toOpenAI for tool calls and
 * tool results. Null content yields an empty content array.
 */
function fromOpenAI(msg) {
  const role = msg.role;
  const c = msg.content;
  if (c == null)
    return { role, content: [] };
  if (typeof c === "string")
    return { role, content: [{ type: "text", text: c }] };
  if (Array.isArray(c)) {
    // Plain OpenAI content-part array: text and data-URL images.
    const content = [];
    for (const part of c) {
      if (!part || typeof part !== "object")
        continue;
      if (part.type === "text") {
        content.push({ type: "text", text: part.text });
      } else if (part.type === "image_url") {
        const url = part.image_url?.url;
        if (url?.startsWith("data:")) {
          const [meta, data] = url.slice(5).split(",", 2);
          content.push({ type: "image", mediaType: meta.replace(";base64", ""), data });
        }
      }
    }
    return { role, content };
  }
  // Plain object: one of the sentinel-tagged wrappers (or unknown → empty).
  const content = [];
  if (c._tag === ASSISTANT_TOOL_CALLS_TAG) {
    const tagged = c;
    if (typeof tagged.text === "string" && tagged.text)
      content.push({ type: "text", text: tagged.text });
    for (const raw of Array.isArray(tagged.tool_calls) ? tagged.tool_calls : []) {
      if (!raw || typeof raw !== "object")
        continue;
      const rawArgs = raw.function?.arguments;
      const input = rawArgs ? typeof rawArgs === "string" ? JSON.parse(rawArgs) : rawArgs : {};
      content.push({ type: "tool_call", id: raw.id ?? "", name: raw.function?.name ?? "", input });
    }
  } else if (c._tag === TOOL_RESULTS_TAG) {
    const tagged = c;
    for (const raw of Array.isArray(tagged.results) ? tagged.results : []) {
      if (!raw || typeof raw !== "object")
        continue;
      content.push({ type: "tool_result", callId: raw.tool_call_id ?? "", output: raw.content ?? "" });
    }
  }
  return { role, content };
}
674
/**
 * Convert an internal message to Anthropic wire format. OpenAI-signed
 * thinking blocks and provider_reasoning blocks are stripped (Anthropic
 * cannot consume them); a lone text block collapses to a string body.
 */
function toAnthropic(msg) {
  const keep = (b) => b.type !== "provider_reasoning" && !(b.type === "thinking" && b.signatureProducer === "openai");
  const encode = (block) => {
    switch (block.type) {
      case "text":
        return { type: "text", text: block.text };
      case "image":
        return { type: "image", source: { type: "base64", media_type: block.mediaType, data: block.data } };
      case "tool_call":
        return { type: "tool_use", id: block.id, name: block.name, input: block.input };
      case "tool_result": {
        const encoded = {
          type: "tool_result",
          tool_use_id: block.callId,
          content: encodeAnthropicToolResultContent(block.output)
        };
        if (block.isError)
          encoded.is_error = true;
        return encoded;
      }
      case "thinking": {
        const encoded = { type: "thinking", thinking: block.text };
        if (block.signature)
          encoded.signature = block.signature;
        return encoded;
      }
      case "redacted_thinking":
        return { type: "redacted_thinking", data: block.data };
      default:
        // Unknown block type: degrade to an empty text block.
        return { type: "text", text: "" };
    }
  };
  const blocks = msg.content.filter(keep).map(encode);
  if (blocks.length === 1 && blocks[0].type === "text")
    return { role: msg.role, content: blocks[0].text };
  return { role: msg.role, content: blocks };
}
710
/**
 * Convert an internal message to OpenAI wire format. Tool calls and tool
 * results have no lossless OpenAI content encoding, so they are stored as
 * sentinel-tagged wrapper objects that fromOpenAI knows how to unpack.
 */
function toOpenAI(msg) {
  const ofType = (type) => msg.content.filter((b) => b.type === type);
  const toolResults = ofType("tool_result");
  const toolCalls = ofType("tool_call");
  const textBlocks = ofType("text");
  const imageBlocks = ofType("image");
  if (toolResults.length > 0) {
    const results = toolResults.map((tr) => ({ tool_call_id: tr.callId, content: tr.output }));
    return { role: msg.role, content: { _tag: TOOL_RESULTS_TAG, results } };
  }
  if (toolCalls.length > 0) {
    return {
      role: msg.role,
      content: {
        _tag: ASSISTANT_TOOL_CALLS_TAG,
        text: textBlocks.length > 0 ? textBlocks[0].text : null,
        tool_calls: toolCalls.map((tc) => ({
          id: tc.id,
          type: "function",
          function: { name: tc.name, arguments: JSON.stringify(tc.input) }
        }))
      }
    };
  }
  if (imageBlocks.length > 0) {
    // Image parts first, then any text parts — matches fromOpenAI's decode.
    const parts = [
      ...imageBlocks.map((img) => ({ type: "image_url", image_url: { url: `data:${img.mediaType};base64,${img.data}` } })),
      ...textBlocks.map((b) => ({ type: "text", text: b.text }))
    ];
    return { role: msg.role, content: parts };
  }
  if (textBlocks.length === 1)
    return { role: msg.role, content: textBlocks[0].text };
  if (textBlocks.length > 1)
    return { role: msg.role, content: textBlocks.map((b) => ({ type: "text", text: b.text })) };
  return { role: msg.role, content: null };
}
763
/**
 * Detect whether a stored message is OpenAI- or Anthropic-shaped and
 * dispatch to the matching decoder. Anthropic is the default for strings,
 * unmarked arrays, and anything unrecognized.
 */
function autoDetectAndConvert(msg) {
  const c = msg.content;
  // Sentinel-tagged wrapper objects are always OpenAI-shaped.
  if (c && typeof c === "object" && !Array.isArray(c)) {
    const tag = c._tag;
    if (typeof tag === "string" && tag.startsWith("__zidane_"))
      return fromOpenAI(msg);
  }
  if (Array.isArray(c)) {
    for (const block of c) {
      if (!block || typeof block !== "object")
        continue;
      // Anthropic markers: tool_use, or tool_result carrying tool_use_id.
      if (block.type === "tool_use" || (block.type === "tool_result" && "tool_use_id" in block))
        return fromAnthropic(msg);
      // OpenAI marker: image_url content parts.
      if (block.type === "image_url")
        return fromOpenAI(msg);
    }
  }
  return fromAnthropic(msg);
}
789
-
790
- export {
791
- userMessage,
792
- assistantMessage,
793
- toolResultsMessage,
794
- OpenAICompatHttpError,
795
- classifyOpenAICompatError,
796
- mapOAIFinishReason,
797
- openaiCompat,
798
- fromAnthropic,
799
- fromOpenAI,
800
- toAnthropic,
801
- toOpenAI,
802
- autoDetectAndConvert
803
- };