zidane 2.0.1 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  AgentToolNotAllowedError
3
- } from "./chunk-7JTBBZ2U.js";
3
+ } from "./chunk-LNN5UTS2.js";
4
4
 
5
5
  // src/skills/activation.ts
6
6
  function createSkillActivationState(options = {}) {
@@ -328,10 +328,12 @@ function escapeXml(str) {
328
328
  import { existsSync, readdirSync, readFileSync, statSync } from "fs";
329
329
  import { homedir } from "os";
330
330
  import { basename as basename2, dirname, join, resolve } from "path";
331
- var FRONTMATTER_RE = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/;
331
+ var FRONTMATTER_RE = /^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/;
332
332
  var INDENT_RE = /^[ \t]{2,}/;
333
333
  var KV_RE = /^([^:]+):(.*)$/;
334
- var QUOTE_RE = /^(['"])(.*)\1$/;
334
+ var DOUBLE_QUOTED_RE = /^"((?:\\.|[^"\\])*)"$/;
335
+ var SINGLE_QUOTED_RE = /^'((?:''|[^'])*)'$/;
336
+ var DQ_ESCAPE_RE = /\\(["\\/bfnrt])/g;
335
337
  var WHITESPACE_SPLIT_RE = /\s+/;
336
338
  var PARAGRAPH_SPLIT_RE = /\n\n/;
337
339
  var COMMA_OR_SPACE_RE = /[,\s]+/;
@@ -390,11 +392,54 @@ function matchFirstColon(line) {
390
392
  return { key, value };
391
393
  }
392
394
  function unquoteYaml(val) {
393
- const m = val.match(QUOTE_RE);
394
- if (m)
395
- return m[2];
395
+ const dq = val.match(DOUBLE_QUOTED_RE);
396
+ if (dq) {
397
+ return dq[1].replace(DQ_ESCAPE_RE, (_, ch) => {
398
+ switch (ch) {
399
+ case '"':
400
+ return '"';
401
+ case "\\":
402
+ return "\\";
403
+ case "/":
404
+ return "/";
405
+ case "b":
406
+ return "\b";
407
+ case "f":
408
+ return "\f";
409
+ case "n":
410
+ return "\n";
411
+ case "r":
412
+ return "\r";
413
+ case "t":
414
+ return "\t";
415
+ default:
416
+ return ch;
417
+ }
418
+ });
419
+ }
420
+ const sq = val.match(SINGLE_QUOTED_RE);
421
+ if (sq) {
422
+ return sq[1].replace(/''/g, "'");
423
+ }
424
+ const hashIdx = val.indexOf(" #");
425
+ if (hashIdx >= 0)
426
+ return val.slice(0, hashIdx).trimEnd();
396
427
  return val;
397
428
  }
429
+ function takeString(frontmatter, key, diagnostics) {
430
+ const raw = frontmatter[key];
431
+ if (raw === void 0 || raw === null)
432
+ return void 0;
433
+ if (typeof raw === "string")
434
+ return raw;
435
+ diagnostics.push({
436
+ severity: "warning",
437
+ code: "invalid-field-type",
438
+ message: `Frontmatter field "${key}" expected string, got ${typeof raw}. Coerced.`,
439
+ field: key
440
+ });
441
+ return String(raw);
442
+ }
398
443
  var RESOURCE_DIRS = {
399
444
  scripts: "script",
400
445
  references: "reference",
@@ -409,9 +454,10 @@ function enumerateResources(baseDir) {
409
454
  try {
410
455
  const files = readdirSync(dirPath, { recursive: true });
411
456
  for (const file of files) {
412
- const fullPath = join(dirPath, file);
457
+ const rel = typeof file === "string" ? file : file.toString("utf-8");
458
+ const fullPath = join(dirPath, rel);
413
459
  if (statSync(fullPath).isFile()) {
414
- resources.push({ path: join(dir, file), type });
460
+ resources.push({ path: join(dir, rel), type });
415
461
  }
416
462
  }
417
463
  } catch {
@@ -436,7 +482,7 @@ async function parseSkillFile(filePath, options = {}) {
436
482
  return null;
437
483
  const content = readFileSync(absPath, "utf-8");
438
484
  const { frontmatter, body, diagnostics } = parseFrontmatter(content);
439
- let description = frontmatter.description;
485
+ let description = takeString(frontmatter, "description", diagnostics);
440
486
  if (!description && body) {
441
487
  const firstParagraph = body.split(PARAGRAPH_SPLIT_RE)[0]?.trim();
442
488
  if (firstParagraph)
@@ -454,12 +500,13 @@ async function parseSkillFile(filePath, options = {}) {
454
500
  }
455
501
  const baseDir = dirname(absPath);
456
502
  const dirName = basename2(baseDir);
457
- const name = frontmatter.name || dirName;
458
- if (frontmatter.name && frontmatter.name !== dirName) {
503
+ const frontmatterName = takeString(frontmatter, "name", diagnostics);
504
+ const name = frontmatterName || dirName;
505
+ if (frontmatterName && frontmatterName !== dirName) {
459
506
  diagnostics.push({
460
507
  severity: "warning",
461
508
  code: "name-mismatch-directory",
462
- message: `Skill name "${frontmatter.name}" does not match parent directory "${dirName}". Loading anyway.`,
509
+ message: `Skill name "${frontmatterName}" does not match parent directory "${dirName}". Loading anyway.`,
463
510
  field: "name"
464
511
  });
465
512
  }
@@ -488,23 +535,25 @@ async function parseSkillFile(filePath, options = {}) {
488
535
  baseDir,
489
536
  resources: enumerateResources(baseDir)
490
537
  };
491
- if (frontmatter.license)
492
- config.license = frontmatter.license;
493
- if (frontmatter.compatibility) {
494
- const comp = frontmatter.compatibility;
495
- if (comp.length > 500) {
538
+ const license = takeString(frontmatter, "license", diagnostics);
539
+ if (license)
540
+ config.license = license;
541
+ const compatibility = takeString(frontmatter, "compatibility", diagnostics);
542
+ if (compatibility) {
543
+ if (compatibility.length > 500) {
496
544
  diagnostics.push({
497
545
  severity: "warning",
498
546
  code: "compatibility-too-long",
499
- message: `Compatibility exceeds spec limit of 500 characters (got ${comp.length}). Loading anyway.`,
547
+ message: `Compatibility exceeds spec limit of 500 characters (got ${compatibility.length}). Loading anyway.`,
500
548
  field: "compatibility"
501
549
  });
502
550
  }
503
- config.compatibility = comp;
551
+ config.compatibility = compatibility;
504
552
  }
505
553
  const metadata = {};
506
- if (frontmatter.metadata && typeof frontmatter.metadata === "object") {
507
- for (const [k, v] of Object.entries(frontmatter.metadata)) {
554
+ const rawMetadata = frontmatter.metadata;
555
+ if (rawMetadata && typeof rawMetadata === "object" && !Array.isArray(rawMetadata)) {
556
+ for (const [k, v] of Object.entries(rawMetadata)) {
508
557
  if (typeof v !== "string") {
509
558
  diagnostics.push({
510
559
  severity: "warning",
@@ -517,11 +566,17 @@ async function parseSkillFile(filePath, options = {}) {
517
566
  }
518
567
  metadata[k] = v;
519
568
  }
569
+ } else if (rawMetadata !== void 0) {
570
+ diagnostics.push({
571
+ severity: "warning",
572
+ code: "invalid-metadata-shape",
573
+ message: `Frontmatter "metadata" expected a map, got ${Array.isArray(rawMetadata) ? "array" : typeof rawMetadata}. Ignored.`,
574
+ field: "metadata"
575
+ });
520
576
  }
521
- if (frontmatter.paths) {
522
- const raw = frontmatter.paths;
523
- const normalized = raw.split(COMMA_OR_SPACE_RE).filter(Boolean).join(",");
524
- metadata["zidane.paths"] = normalized;
577
+ const pathsField = takeString(frontmatter, "paths", diagnostics);
578
+ if (pathsField) {
579
+ metadata["zidane.paths"] = pathsField.split(COMMA_OR_SPACE_RE).filter(Boolean).join(",");
525
580
  diagnostics.push({
526
581
  severity: "warning",
527
582
  code: "deprecated-top-level-field",
@@ -529,8 +584,9 @@ async function parseSkillFile(filePath, options = {}) {
529
584
  field: "paths"
530
585
  });
531
586
  }
532
- if (frontmatter.model) {
533
- metadata["zidane.model"] = frontmatter.model;
587
+ const modelField = takeString(frontmatter, "model", diagnostics);
588
+ if (modelField) {
589
+ metadata["zidane.model"] = modelField;
534
590
  diagnostics.push({
535
591
  severity: "warning",
536
592
  code: "deprecated-top-level-field",
@@ -538,20 +594,23 @@ async function parseSkillFile(filePath, options = {}) {
538
594
  field: "model"
539
595
  });
540
596
  }
541
- const legacyThinking = frontmatter.thinking ?? frontmatter.effort;
597
+ const thinkingField = takeString(frontmatter, "thinking", diagnostics);
598
+ const effortField = thinkingField ? void 0 : takeString(frontmatter, "effort", diagnostics);
599
+ const legacyThinking = thinkingField ?? effortField;
542
600
  if (legacyThinking) {
543
601
  metadata["zidane.thinking"] = legacyThinking;
544
602
  diagnostics.push({
545
603
  severity: "warning",
546
604
  code: "deprecated-top-level-field",
547
- message: `\`${frontmatter.thinking ? "thinking" : "effort"}\` is not a spec field and is deprecated \u2014 moved to \`metadata["zidane.thinking"]\`.`,
548
- field: frontmatter.thinking ? "thinking" : "effort"
605
+ message: `\`${thinkingField ? "thinking" : "effort"}\` is not a spec field and is deprecated \u2014 moved to \`metadata["zidane.thinking"]\`.`,
606
+ field: thinkingField ? "thinking" : "effort"
549
607
  });
550
608
  }
551
609
  if (Object.keys(metadata).length > 0)
552
610
  config.metadata = metadata;
553
- if (frontmatter["allowed-tools"]) {
554
- config.allowedTools = frontmatter["allowed-tools"].split(WHITESPACE_SPLIT_RE).filter(Boolean);
611
+ const allowedTools = takeString(frontmatter, "allowed-tools", diagnostics);
612
+ if (allowedTools) {
613
+ config.allowedTools = allowedTools.split(WHITESPACE_SPLIT_RE).filter(Boolean);
555
614
  }
556
615
  if (diagnostics.length > 0)
557
616
  config.diagnostics = diagnostics;
@@ -627,6 +686,7 @@ import { join as join2 } from "path";
627
686
  var YAML_RESERVED_RE = /[:#&*!|>%@`]/;
628
687
  var YAML_EDGE_OR_QUOTE_RE = /^\s|\s$|["']/;
629
688
  var DQUOTE_RE = /"/g;
689
+ var LEADING_NEWLINES_RE = /^\n+/;
630
690
  function yamlEscape(value) {
631
691
  if (YAML_RESERVED_RE.test(value) || YAML_EDGE_OR_QUOTE_RE.test(value) || value === "")
632
692
  return `"${value.replace(DQUOTE_RE, '\\"')}"`;
@@ -661,10 +721,11 @@ ${summary}`);
661
721
  const skillDir = join2(targetDir, skill.name);
662
722
  mkdirSync(skillDir, { recursive: true });
663
723
  const frontmatter = serializeFrontmatter(skill);
664
- const body = skill.instructions ? `
665
- ${skill.instructions}` : "";
666
- const content = `${frontmatter}
667
- ${body}
724
+ const bodyTrimmed = skill.instructions ? skill.instructions.replace(LEADING_NEWLINES_RE, "") : "";
725
+ const content = bodyTrimmed ? `${frontmatter}
726
+
727
+ ${bodyTrimmed}
728
+ ` : `${frontmatter}
668
729
  `;
669
730
  const skillPath = join2(skillDir, "SKILL.md");
670
731
  writeFileSync(skillPath, content);
@@ -744,7 +805,8 @@ async function interpolateShellCommands(instructions, execution, handle) {
744
805
  const output = result2.exitCode === 0 ? result2.stdout.trim() : `[command failed (exit ${result2.exitCode}): ${result2.stderr.trim() || result2.stdout.trim()}]`;
745
806
  replacements.push({ index, length, output });
746
807
  } catch (err) {
747
- replacements.push({ index, length, output: `[command error: ${err.message}]` });
808
+ const message = err instanceof Error ? err.message : String(err);
809
+ replacements.push({ index, length, output: `[command error: ${message}]` });
748
810
  }
749
811
  }
750
812
  let result = instructions;
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  validateSkillForWrite
3
- } from "./chunk-BCXXXJ3G.js";
3
+ } from "./chunk-DCYJYM3E.js";
4
4
 
5
5
  // src/skills/index.ts
6
6
  function defineSkill(config) {
@@ -69,8 +69,11 @@ function resultToString(content) {
69
69
  if (!content || !Array.isArray(content))
70
70
  return "";
71
71
  return content.map((block) => {
72
- if (block?.type === "text")
73
- return block.text;
72
+ if (block && typeof block === "object" && block.type === "text") {
73
+ const text = block.text;
74
+ if (typeof text === "string")
75
+ return text;
76
+ }
74
77
  return JSON.stringify(block);
75
78
  }).join("\n");
76
79
  }
@@ -144,6 +147,7 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
144
147
  const connections = [];
145
148
  const tools = {};
146
149
  const errors = [];
150
+ let closed = false;
147
151
  for (const config of configs) {
148
152
  try {
149
153
  const client = _clientFactory ? _clientFactory() : new Client({ name: "zidane", version: "1.0.0" });
@@ -162,7 +166,7 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
162
166
  inputSchema: tool.inputSchema ?? { type: "object", properties: {} }
163
167
  },
164
168
  execute: async (input, ctx) => {
165
- const { turnId, callId } = ctx;
169
+ const { turnId, callId, signal } = ctx;
166
170
  const displayName = ctx.harness?.toolAliases?.[namespacedName] ?? namespacedName;
167
171
  const gateCtx = {
168
172
  turnId,
@@ -188,13 +192,12 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
188
192
  });
189
193
  const timeout = config.toolTimeout ?? 3e4;
190
194
  try {
191
- let timer;
192
- const result = await Promise.race([
193
- client.callTool({ name: tool.name, arguments: effectiveInput }),
194
- new Promise((_, reject) => {
195
- timer = setTimeout(() => reject(new Error(`MCP tool "${tool.name}" on server "${config.name}" timed out after ${timeout}ms`)), timeout);
196
- })
197
- ]).finally(() => clearTimeout(timer));
195
+ const result = await raceWithTimeoutAndSignal(
196
+ () => client.callTool({ name: tool.name, arguments: effectiveInput }),
197
+ timeout,
198
+ `MCP tool "${tool.name}" on server "${config.name}" timed out after ${timeout}ms`,
199
+ signal
200
+ );
198
201
  let output = packMcpResult(result.content);
199
202
  const transformCtx = {
200
203
  turnId,
@@ -218,6 +221,7 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
218
221
  });
219
222
  return output;
220
223
  } catch (err) {
224
+ const error = err instanceof Error ? err : new Error(String(err));
221
225
  await hooks?.callHook("mcp:tool:error", {
222
226
  turnId,
223
227
  callId,
@@ -225,7 +229,7 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
225
229
  tool: tool.name,
226
230
  displayName,
227
231
  input: effectiveInput,
228
- error: err
232
+ error
229
233
  });
230
234
  await hooks?.callHook("mcp:tool:after", {
231
235
  turnId,
@@ -234,9 +238,9 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
234
238
  tool: tool.name,
235
239
  displayName,
236
240
  input: effectiveInput,
237
- result: err.message ?? String(err)
241
+ result: error.message
238
242
  });
239
- throw err;
243
+ throw error;
240
244
  }
241
245
  }
242
246
  };
@@ -247,8 +251,9 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
247
251
  tools: toolNames
248
252
  });
249
253
  } catch (err) {
250
- errors.push({ name: config.name, error: err });
251
- await hooks?.callHook("mcp:error", { name: config.name, error: err });
254
+ const error = err instanceof Error ? err : new Error(String(err));
255
+ errors.push({ name: config.name, error });
256
+ await hooks?.callHook("mcp:error", { name: config.name, error });
252
257
  }
253
258
  }
254
259
  if (errors.length > 0 && connections.length === 0) {
@@ -258,6 +263,9 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
258
263
  return {
259
264
  tools,
260
265
  close: async () => {
266
+ if (closed)
267
+ return;
268
+ closed = true;
261
269
  await Promise.allSettled(
262
270
  connections.map(async ({ name, client }) => {
263
271
  await hooks?.callHook("mcp:close", { name });
@@ -267,6 +275,27 @@ async function connectMcpServers(configs, _clientFactory, hooks) {
267
275
  }
268
276
  };
269
277
  }
278
+ async function raceWithTimeoutAndSignal(task, timeoutMs, timeoutMessage, signal) {
279
+ if (signal?.aborted)
280
+ throw new Error("MCP tool call aborted");
281
+ let timer;
282
+ let onAbort;
283
+ try {
284
+ return await new Promise((resolvePromise, rejectPromise) => {
285
+ timer = setTimeout(() => rejectPromise(new Error(timeoutMessage)), timeoutMs);
286
+ if (signal) {
287
+ onAbort = () => rejectPromise(new Error("MCP tool call aborted"));
288
+ signal.addEventListener("abort", onAbort, { once: true });
289
+ }
290
+ task().then(resolvePromise, rejectPromise);
291
+ });
292
+ } finally {
293
+ if (timer)
294
+ clearTimeout(timer);
295
+ if (signal && onAbort)
296
+ signal.removeEventListener("abort", onAbort);
297
+ }
298
+ }
270
299
 
271
300
  export {
272
301
  normalizeMcpServers,
@@ -14,11 +14,18 @@ var AgentProviderError = class extends Error {
14
14
  code = "provider_error";
15
15
  provider;
16
16
  providerCode;
17
+ /**
18
+ * Whether a retry with backoff is likely to succeed. See
19
+ * {@link ClassifiedError.retryable}. Absent when the provider did not
20
+ * classify the error — callers should treat absent as "don't retry".
21
+ */
22
+ retryable;
17
23
  constructor(message, options) {
18
24
  super(message, options.cause !== void 0 ? { cause: options.cause } : void 0);
19
25
  this.name = "AgentProviderError";
20
26
  this.provider = options.provider;
21
27
  this.providerCode = options.providerCode;
28
+ this.retryable = options.retryable;
22
29
  }
23
30
  };
24
31
  var AgentAbortedError = class extends Error {
@@ -74,6 +81,7 @@ function toTypedError(classification, provider, cause) {
74
81
  return new AgentProviderError(message, {
75
82
  provider,
76
83
  providerCode: classification.providerCode,
84
+ retryable: classification.retryable,
77
85
  cause
78
86
  });
79
87
  }
@@ -368,12 +368,14 @@ async function createSession(options = {}) {
368
368
  get metadata() {
369
369
  return data.metadata;
370
370
  },
371
- startRun(runId, prompt) {
371
+ startRun(runId, prompt, extras) {
372
372
  data.runs.push({
373
373
  id: runId,
374
374
  startedAt: Date.now(),
375
375
  prompt: prompt ?? "",
376
- status: "running"
376
+ status: "running",
377
+ ...extras?.parentRunId ? { parentRunId: extras.parentRunId } : {},
378
+ ...typeof extras?.depth === "number" ? { depth: extras.depth } : {}
377
379
  });
378
380
  touch();
379
381
  },
@@ -1,10 +1,17 @@
1
1
  import {
2
2
  matchesContextExceeded
3
- } from "./chunk-7JTBBZ2U.js";
3
+ } from "./chunk-LNN5UTS2.js";
4
4
 
5
5
  // src/providers/openai-compat.ts
6
6
  var TOOL_RESULTS_TAG = "__zidane_tool_results__";
7
7
  var ASSISTANT_TOOL_CALLS_TAG = "__zidane_assistant_tc__";
8
+ var SSE_MAX_BUFFER_BYTES = 8 * 1024 * 1024;
9
+ var OpenAICompatStreamError = class extends Error {
10
+ constructor(message) {
11
+ super(message);
12
+ this.name = "OpenAICompatStreamError";
13
+ }
14
+ };
8
15
  async function consumeSSE(response, callbacks, signal) {
9
16
  const reader = response.body.getReader();
10
17
  const decoder = new TextDecoder();
@@ -22,6 +29,11 @@ async function consumeSSE(response, callbacks, signal) {
22
29
  if (done)
23
30
  break;
24
31
  buffer += decoder.decode(value, { stream: true });
32
+ if (buffer.length > SSE_MAX_BUFFER_BYTES) {
33
+ throw new OpenAICompatStreamError(
34
+ `SSE buffer exceeded ${SSE_MAX_BUFFER_BYTES} bytes without a line boundary \u2014 upstream may be streaming non-SSE data.`
35
+ );
36
+ }
25
37
  const lines = buffer.split("\n");
26
38
  buffer = lines.pop() || "";
27
39
  for (const line of lines) {
@@ -36,22 +48,27 @@ async function consumeSSE(response, callbacks, signal) {
36
48
  } catch {
37
49
  continue;
38
50
  }
39
- const choice = chunk.choices?.[0];
51
+ const choices = chunk.choices;
52
+ const choice = choices?.[0];
40
53
  if (!choice)
41
54
  continue;
42
- if (choice.finish_reason)
43
- finishReason = choice.finish_reason;
44
- const thinkingDelta = choice.delta?.reasoning_content ?? choice.delta?.reasoning;
55
+ const fr = choice.finish_reason;
56
+ if (fr)
57
+ finishReason = fr;
58
+ const delta = choice.delta;
59
+ const thinkingDelta = delta?.reasoning_content ?? delta?.reasoning;
45
60
  if (thinkingDelta) {
46
61
  thinking += thinkingDelta;
47
62
  callbacks.onThinking?.(thinkingDelta);
48
63
  }
49
- if (choice.delta?.content) {
50
- text += choice.delta.content;
51
- callbacks.onText(choice.delta.content);
64
+ const contentDelta = delta?.content;
65
+ if (contentDelta) {
66
+ text += contentDelta;
67
+ callbacks.onText(contentDelta);
52
68
  }
53
- if (choice.delta?.tool_calls) {
54
- for (const tc of choice.delta.tool_calls) {
69
+ const toolCallsDelta = delta?.tool_calls;
70
+ if (toolCallsDelta) {
71
+ for (const tc of toolCallsDelta) {
55
72
  const existing = tcMap.get(tc.index);
56
73
  if (existing) {
57
74
  if (tc.function?.arguments)
@@ -65,11 +82,12 @@ async function consumeSSE(response, callbacks, signal) {
65
82
  }
66
83
  }
67
84
  }
68
- if (chunk.usage) {
85
+ const chunkUsage = chunk.usage;
86
+ if (chunkUsage) {
69
87
  usage = {
70
- input: chunk.usage.prompt_tokens,
71
- output: chunk.usage.completion_tokens,
72
- cost: chunk.usage.total_cost ?? void 0
88
+ input: chunkUsage.prompt_tokens ?? 0,
89
+ output: chunkUsage.completion_tokens ?? 0,
90
+ cost: chunkUsage.total_cost ?? void 0
73
91
  };
74
92
  }
75
93
  }
@@ -77,11 +95,20 @@ async function consumeSSE(response, callbacks, signal) {
77
95
  } finally {
78
96
  reader.releaseLock();
79
97
  }
80
- const toolCalls = Array.from(tcMap.values()).map((tc) => ({
81
- id: tc.id,
82
- name: tc.name,
83
- input: tc.args ? JSON.parse(tc.args) : {}
84
- }));
98
+ const toolCalls = [];
99
+ for (const tc of tcMap.values()) {
100
+ if (!tc.args) {
101
+ toolCalls.push({ id: tc.id, name: tc.name, input: {} });
102
+ continue;
103
+ }
104
+ try {
105
+ toolCalls.push({ id: tc.id, name: tc.name, input: JSON.parse(tc.args) });
106
+ } catch (err) {
107
+ throw new OpenAICompatStreamError(
108
+ `Tool call "${tc.name}" (${tc.id}) arguments were truncated or malformed: ${err.message}`
109
+ );
110
+ }
111
+ }
85
112
  return { text, thinking, toolCalls, finishReason, usage };
86
113
  }
87
114
  function toImageUrlPart(img) {
@@ -249,6 +276,14 @@ function classifyOpenAICompatError(err) {
249
276
  return null;
250
277
  if (err.name === "AbortError")
251
278
  return { kind: "aborted" };
279
+ if (err instanceof OpenAICompatStreamError) {
280
+ return {
281
+ kind: "provider_error",
282
+ providerCode: "stream_error",
283
+ message: err.message,
284
+ retryable: true
285
+ };
286
+ }
252
287
  if (!(err instanceof OpenAICompatHttpError))
253
288
  return null;
254
289
  const code = err.providerCode;
@@ -263,9 +298,17 @@ function classifyOpenAICompatError(err) {
263
298
  return {
264
299
  kind: "provider_error",
265
300
  providerCode: code ?? String(err.status),
266
- message: msg
301
+ message: msg,
302
+ retryable: isRetryableHttpStatus(err.status)
267
303
  };
268
304
  }
305
+ function isRetryableHttpStatus(status) {
306
+ if (status === 429)
307
+ return true;
308
+ if (status >= 500 && status !== 501)
309
+ return true;
310
+ return false;
311
+ }
269
312
  function mapOAIFinishReason(reason) {
270
313
  if (!reason)
271
314
  return void 0;
@@ -4,7 +4,7 @@ import {
4
4
  shell,
5
5
  spawn,
6
6
  writeFile
7
- } from "./chunk-PASFWG7S.js";
7
+ } from "./chunk-AWDWJ2YJ.js";
8
8
 
9
9
  // src/harnesses/basic.ts
10
10
  var basicTools = { shell, readFile, writeFile, listFiles };
package/dist/contexts.js CHANGED
@@ -2,7 +2,7 @@ import {
2
2
  createDockerContext,
3
3
  createProcessContext,
4
4
  createSandboxContext
5
- } from "./chunk-SZA4FKW5.js";
5
+ } from "./chunk-2EQT4EHD.js";
6
6
  export {
7
7
  createDockerContext,
8
8
  createProcessContext,
@@ -1,4 +1,4 @@
1
1
  import 'hookable';
2
- export { H as Harness, p as HarnessConfig, _ as ToolContext, $ as ToolDef, a2 as ToolMap, aP as basic, aQ as basicTools, aA as defineHarness, aF as noTools } from './agent-D-ZFMbSd.js';
2
+ export { H as Harness, p as HarnessConfig, _ as ToolContext, $ as ToolDef, a2 as ToolMap, aP as basic, aQ as basicTools, aA as defineHarness, aF as noTools } from './agent-DFkSTVKm.js';
3
3
  import './types-BpvTmawk.js';
4
4
  import '@modelcontextprotocol/sdk/client/index.js';
package/dist/harnesses.js CHANGED
@@ -3,12 +3,12 @@ import {
3
3
  basic_default,
4
4
  defineHarness,
5
5
  noTools
6
- } from "./chunk-OVQ4N64O.js";
7
- import "./chunk-PASFWG7S.js";
8
- import "./chunk-BCXXXJ3G.js";
9
- import "./chunk-SZA4FKW5.js";
10
- import "./chunk-PJUUYBKF.js";
11
- import "./chunk-7JTBBZ2U.js";
6
+ } from "./chunk-VUVLOTEY.js";
7
+ import "./chunk-AWDWJ2YJ.js";
8
+ import "./chunk-DCYJYM3E.js";
9
+ import "./chunk-2EQT4EHD.js";
10
+ import "./chunk-IJORSHFI.js";
11
+ import "./chunk-LNN5UTS2.js";
12
12
  export {
13
13
  basic_default as basic,
14
14
  basicTools,
package/dist/index.d.ts CHANGED
@@ -1,11 +1,11 @@
1
- import { d as AgentHooks } from './agent-D-ZFMbSd.js';
2
- export { ad as ActivationVia, ae as ActiveSkill, A as Agent, a as AgentAbortedError, b as AgentBehavior, c as AgentContextExceededError, e as AgentOptions, f as AgentProviderError, g as AgentRunOptions, h as AgentStats, i as AgentToolNotAllowedError, j as AnthropicParams, C as CONTEXT_EXCEEDED_MESSAGE_PATTERNS, k as CerebrasParams, m as ClassifiedError, n as ClassifiedErrorKind, o as CreateSessionOptions, af as DeactivationReason, ag as FileMapAdapter, ah as FileMapStoreOptions, H as Harness, p as HarnessConfig, I as ImageContent, M as McpConnection, q as McpServerConfig, r as McpToolHookContext, O as OAuthRefreshHookContext, ai as OpenAICompatAuthHeader, aj as OpenAICompatHttpError, ak as OpenAICompatParams, s as OpenAIParams, t as OpenRouterParams, P as PromptDocumentPart, u as PromptImagePart, v as PromptPart, w as PromptTextPart, x as Provider, y as ProviderCapabilities, R as RemoteStoreOptions, z as RunHookMap, S as Session, B as SessionContentBlock, D as SessionData, E as SessionEndStatus, F as SessionHookContext, G as SessionMessage, J as SessionRun, K as SessionStore, L as SessionTurn, al as SkillActivationState, am as SkillActivationStateOptions, N as SkillConfig, an as SkillDiagnostic, Q as SkillResource, ao as SkillSource, T as SkillsConfig, U as SpawnHookContext, V as StreamCallbacks, W as StreamHookContext, X as StreamOptions, Y as ThinkingLevel, Z as ToolCall, _ as ToolContext, $ as ToolDef, a0 as ToolExecutionMode, a1 as ToolHookContext, a2 as ToolMap, a3 as ToolResult, a4 as ToolResultContent, a5 as ToolResultImageContent, a6 as ToolResultTextContent, a7 as ToolSpec, a8 as TurnFinishReason, a9 as TurnResult, aa as TurnUsage, ap as anthropic, aq as autoDetectAndConvert, ar as cerebras, as as classifyOpenAICompatError, at as connectMcpServers, au as createAgent, av as createFileMapStore, aw as createMemoryStore, ax as createRemoteStore, ay as createSession, az as createSkillActivationState, aA as defineHarness, aB as fromAnthropic, aC as fromOpenAI, 
aD as loadSession, aE as mapOAIFinishReason, ab as matchesContextExceeded, aF as noTools, aG as normalizeMcpBlocks, aH as normalizeMcpServers, aI as openai, aJ as openaiCompat, aK as openrouter, aL as resultToString, aM as toAnthropic, aN as toOpenAI, aO as toTypedError, ac as toolResultToText } from './agent-D-ZFMbSd.js';
1
+ import { d as AgentHooks } from './agent-DFkSTVKm.js';
2
+ export { ad as ActivationVia, ae as ActiveSkill, A as Agent, a as AgentAbortedError, b as AgentBehavior, c as AgentContextExceededError, e as AgentOptions, f as AgentProviderError, g as AgentRunOptions, h as AgentStats, i as AgentToolNotAllowedError, j as AnthropicParams, C as CONTEXT_EXCEEDED_MESSAGE_PATTERNS, k as CerebrasParams, m as ClassifiedError, n as ClassifiedErrorKind, o as CreateSessionOptions, af as DeactivationReason, ag as FileMapAdapter, ah as FileMapStoreOptions, H as Harness, p as HarnessConfig, I as ImageContent, M as McpConnection, q as McpServerConfig, r as McpToolHookContext, O as OAuthRefreshHookContext, ai as OpenAICompatAuthHeader, aj as OpenAICompatHttpError, ak as OpenAICompatParams, s as OpenAIParams, t as OpenRouterParams, P as PromptDocumentPart, u as PromptImagePart, v as PromptPart, w as PromptTextPart, x as Provider, y as ProviderCapabilities, R as RemoteStoreOptions, z as RunHookMap, S as Session, B as SessionContentBlock, D as SessionData, E as SessionEndStatus, F as SessionHookContext, G as SessionMessage, J as SessionRun, K as SessionStore, L as SessionTurn, al as SkillActivationState, am as SkillActivationStateOptions, N as SkillConfig, an as SkillDiagnostic, Q as SkillResource, ao as SkillSource, T as SkillsConfig, U as SpawnHookContext, V as StreamCallbacks, W as StreamHookContext, X as StreamOptions, Y as ThinkingLevel, Z as ToolCall, _ as ToolContext, $ as ToolDef, a0 as ToolExecutionMode, a1 as ToolHookContext, a2 as ToolMap, a3 as ToolResult, a4 as ToolResultContent, a5 as ToolResultImageContent, a6 as ToolResultTextContent, a7 as ToolSpec, a8 as TurnFinishReason, a9 as TurnResult, aa as TurnUsage, ap as anthropic, aq as autoDetectAndConvert, ar as cerebras, as as classifyOpenAICompatError, at as connectMcpServers, au as createAgent, av as createFileMapStore, aw as createMemoryStore, ax as createRemoteStore, ay as createSession, az as createSkillActivationState, aA as defineHarness, aB as fromAnthropic, aC as fromOpenAI, 
aD as loadSession, aE as mapOAIFinishReason, ab as matchesContextExceeded, aF as noTools, aG as normalizeMcpBlocks, aH as normalizeMcpServers, aI as openai, aJ as openaiCompat, aK as openrouter, aL as resultToString, aM as toAnthropic, aN as toOpenAI, aO as toTypedError, ac as toolResultToText } from './agent-DFkSTVKm.js';
3
3
  export { createDockerContext, createProcessContext } from './contexts.js';
4
4
  export { S as SandboxProvider, c as createSandboxContext } from './sandbox-CW72eLDP.js';
5
5
  export { C as ContextCapabilities, a as ContextType, E as ExecResult, b as ExecutionContext, c as ExecutionHandle, S as SpawnConfig } from './types-BpvTmawk.js';
6
6
  export { IMPLICITLY_ALLOWED_SKILL_TOOLS, SkillValidationIssue, SkillValidationResult, SourcedScanPath, buildCatalog, defineSkill, discoverSkills, installAllowedToolsGate, interpolateShellCommands, isToolAllowedByUnion, matchesAllowedTool, mergeSkillsConfig, parseAllowedToolPattern, parseSkillFile, resolveSkills, validateResourcePath, validateSkillForWrite, validateSkillName, writeSkillToDisk, writeSkillsToDisk } from './skills.js';
7
- export { S as SkillsReadToolOptions, a as SkillsRunScriptToolOptions, b as SkillsUseToolOptions, c as createSkillsReadTool, d as createSkillsRunScriptTool, e as createSkillsUseTool, g as glob } from './skills-use-C4KFVla0.js';
8
- export { C as ChildAgent, I as InteractionToolOptions, S as SpawnToolOptions, a as SpawnToolState, c as createInteractionTool, b as createSpawnTool, s as spawn } from './spawn-RoqpjYLZ.js';
7
+ export { S as SkillsReadToolOptions, a as SkillsRunScriptToolOptions, b as SkillsUseToolOptions, c as createSkillsReadTool, d as createSkillsRunScriptTool, e as createSkillsUseTool, g as glob } from './skills-use-DWprxufr.js';
8
+ export { C as ChildAgent, I as InteractionToolOptions, S as SpawnToolOptions, a as SpawnToolState, c as createInteractionTool, b as createSpawnTool, s as spawn } from './spawn-CW5GEK-T.js';
9
9
  import { Hookable } from 'hookable';
10
10
  import '@modelcontextprotocol/sdk/client/index.js';
11
11