extrait 0.6.1 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -700,36 +700,36 @@ function unwrap(schema) {
700
700
  let optional = false;
701
701
  let nullable = false;
702
702
  while (true) {
703
- const typeName = current?._def?.type;
703
+ const typeName = current?.def?.type;
704
704
  if (!typeName) {
705
705
  break;
706
706
  }
707
707
  if (typeName === "optional") {
708
708
  optional = true;
709
- current = current._def?.innerType ?? current;
709
+ current = current.def?.innerType ?? current;
710
710
  continue;
711
711
  }
712
712
  if (typeName === "default") {
713
713
  optional = true;
714
- current = current._def?.innerType ?? current;
714
+ current = current.def?.innerType ?? current;
715
715
  continue;
716
716
  }
717
717
  if (typeName === "nullable") {
718
718
  nullable = true;
719
- current = current._def?.innerType ?? current;
719
+ current = current.def?.innerType ?? current;
720
720
  continue;
721
721
  }
722
722
  if (typeName === "pipe") {
723
- const outType = current._def?.out?._def?.type;
723
+ const outType = current.def?.out?.def?.type;
724
724
  if (outType === "transform") {
725
- current = current._def?.in ?? current;
725
+ current = current.def?.in ?? current;
726
726
  } else {
727
- current = current._def?.out ?? current;
727
+ current = current.def?.out ?? current;
728
728
  }
729
729
  continue;
730
730
  }
731
731
  if (typeName === "catch" || typeName === "readonly") {
732
- current = current._def?.innerType ?? current;
732
+ current = current.def?.innerType ?? current;
733
733
  continue;
734
734
  }
735
735
  break;
@@ -745,7 +745,7 @@ function formatCore(schema, depth, seen) {
745
745
  return "unknown";
746
746
  }
747
747
  seen.add(schema);
748
- const typeName = schema?._def?.type;
748
+ const typeName = schema?.def?.type;
749
749
  switch (typeName) {
750
750
  case "string":
751
751
  return "string";
@@ -770,44 +770,44 @@ function formatCore(schema, depth, seen) {
770
770
  case "void":
771
771
  return "void";
772
772
  case "literal": {
773
- const value = schema._def?.values?.[0];
773
+ const value = schema.def?.values?.[0];
774
774
  return JSON.stringify(value);
775
775
  }
776
776
  case "enum": {
777
- const entries = schema._def?.entries;
777
+ const entries = schema.def?.entries;
778
778
  const values = Object.values(entries ?? {});
779
779
  const unique = [...new Set(values.filter((v) => typeof v !== "string" || Number.isNaN(Number(v))))];
780
780
  return unique.map((v) => JSON.stringify(v)).join(" | ") || "string";
781
781
  }
782
782
  case "array": {
783
- const inner = formatType(schema._def?.element ?? schema, depth, seen);
783
+ const inner = formatType(schema.def?.element ?? schema, depth, seen);
784
784
  return requiresParentheses(inner) ? `(${inner})[]` : `${inner}[]`;
785
785
  }
786
786
  case "tuple": {
787
- const items = (schema._def?.items ?? []).map((item) => formatType(item, depth, seen));
787
+ const items = (schema.def?.items ?? []).map((item) => formatType(item, depth, seen));
788
788
  return `[${items.join(", ")}]`;
789
789
  }
790
790
  case "union": {
791
- const options = (schema._def?.options ?? []).map((option) => formatType(option, depth, seen));
791
+ const options = (schema.def?.options ?? []).map((option) => formatType(option, depth, seen));
792
792
  return options.join(" | ") || "unknown";
793
793
  }
794
794
  case "intersection": {
795
- const left = formatType(schema._def?.left ?? schema, depth, seen);
796
- const right = formatType(schema._def?.right ?? schema, depth, seen);
795
+ const left = formatType(schema.def?.left ?? schema, depth, seen);
796
+ const right = formatType(schema.def?.right ?? schema, depth, seen);
797
797
  return `${left} & ${right}`;
798
798
  }
799
799
  case "record": {
800
- const keyType = formatType(schema._def?.keyType ?? schema, depth, seen);
801
- const valueType = formatType(schema._def?.valueType ?? schema, depth, seen);
800
+ const keyType = formatType(schema.def?.keyType ?? schema, depth, seen);
801
+ const valueType = formatType(schema.def?.valueType ?? schema, depth, seen);
802
802
  return `Record<${keyType}, ${valueType}>`;
803
803
  }
804
804
  case "map": {
805
- const keyType = formatType(schema._def?.keyType ?? schema, depth, seen);
806
- const valueType = formatType(schema._def?.valueType ?? schema, depth, seen);
805
+ const keyType = formatType(schema.def?.keyType ?? schema, depth, seen);
806
+ const valueType = formatType(schema.def?.valueType ?? schema, depth, seen);
807
807
  return `Map<${keyType}, ${valueType}>`;
808
808
  }
809
809
  case "set": {
810
- const valueType = formatType(schema._def?.valueType ?? schema, depth, seen);
810
+ const valueType = formatType(schema.def?.valueType ?? schema, depth, seen);
811
811
  return `Set<${valueType}>`;
812
812
  }
813
813
  case "object":
@@ -821,7 +821,7 @@ function formatCore(schema, depth, seen) {
821
821
  function formatObject(schema, depth, seen) {
822
822
  const indent = " ".repeat(depth);
823
823
  const innerIndent = " ".repeat(depth + 1);
824
- const rawShape = schema._def?.shape;
824
+ const rawShape = schema.def?.shape;
825
825
  const shape = typeof rawShape === "function" ? rawShape() : rawShape ?? {};
826
826
  const entries = Object.entries(shape);
827
827
  if (entries.length === 0) {
@@ -847,27 +847,27 @@ function requiresParentheses(typeText) {
847
847
  return typeText.includes(" | ") || typeText.includes(" & ");
848
848
  }
849
849
  function isIntegerNumber(schema) {
850
- const checks = schema._def?.checks ?? [];
850
+ const checks = schema.def?.checks ?? [];
851
851
  return checks.some((check) => check.isInt === true);
852
852
  }
853
853
  function readSchemaDescription(schema) {
854
854
  let current = schema;
855
- while (current?._def?.type) {
855
+ while (current?.def?.type) {
856
856
  const desc = current.description;
857
857
  if (typeof desc === "string" && desc.trim().length > 0) {
858
858
  return sanitizeDescription(desc);
859
859
  }
860
- const typeName = current._def.type;
860
+ const typeName = current.def.type;
861
861
  if (typeName === "optional" || typeName === "default" || typeName === "nullable") {
862
- current = current._def.innerType ?? current;
862
+ current = current.def.innerType ?? current;
863
863
  continue;
864
864
  }
865
865
  if (typeName === "pipe") {
866
- current = current._def.in ?? current;
866
+ current = current.def.in ?? current;
867
867
  continue;
868
868
  }
869
869
  if (typeName === "catch" || typeName === "readonly") {
870
- current = current._def.innerType ?? current;
870
+ current = current.def.innerType ?? current;
871
871
  continue;
872
872
  }
873
873
  break;
@@ -1130,7 +1130,7 @@ function findSSEBoundary(buffer) {
1130
1130
  }
1131
1131
 
1132
1132
  // src/providers/mcp-runtime.ts
1133
- var DEFAULT_MAX_TOOL_ROUNDS = 8;
1133
+ var DEFAULT_MAX_TOOL_ROUNDS = 100;
1134
1134
  async function resolveMCPToolset(clients) {
1135
1135
  if (!Array.isArray(clients) || clients.length === 0) {
1136
1136
  return {
@@ -1777,6 +1777,7 @@ async function completeWithChatCompletionsWithMCP(options, fetcher, path, reques
1777
1777
  let lastPayload;
1778
1778
  const toolCalls = [];
1779
1779
  const toolExecutions = [];
1780
+ const reasoningBlocks = [];
1780
1781
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
1781
1782
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
1782
1783
  const transportTools = toProviderFunctionTools(mcpToolset);
@@ -1807,14 +1808,17 @@ async function completeWithChatCompletionsWithMCP(options, fetcher, path, reques
1807
1808
  finishReason = pickFinishReason(payload);
1808
1809
  const assistantMessage = pickAssistantMessage(payload);
1809
1810
  const calledTools = pickChatToolCalls(payload);
1811
+ const roundReasoning = pickAssistantReasoning(payload);
1812
+ pushReasoningBlock(reasoningBlocks, round, roundReasoning);
1810
1813
  if (!assistantMessage) {
1811
1814
  throw new Error("No assistant message in OpenAI-compatible response.");
1812
1815
  }
1813
1816
  if (calledTools.length === 0) {
1814
- const reasoning = pickAssistantReasoning(payload);
1817
+ const reasoning = joinReasoningBlocks(reasoningBlocks) || undefined;
1815
1818
  return {
1816
1819
  text: pickAssistantText(payload),
1817
- reasoning: reasoning.length > 0 ? reasoning : undefined,
1820
+ reasoning,
1821
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
1818
1822
  raw: payload,
1819
1823
  usage: aggregatedUsage,
1820
1824
  finishReason,
@@ -1842,10 +1846,8 @@ async function completeWithChatCompletionsWithMCP(options, fetcher, path, reques
1842
1846
  }
1843
1847
  return {
1844
1848
  text: pickAssistantText(lastPayload ?? {}),
1845
- reasoning: (() => {
1846
- const value = pickAssistantReasoning(lastPayload ?? {});
1847
- return value.length > 0 ? value : undefined;
1848
- })(),
1849
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
1850
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
1849
1851
  raw: lastPayload,
1850
1852
  usage: aggregatedUsage,
1851
1853
  finishReason,
@@ -1893,6 +1895,7 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
1893
1895
  let lastPayload;
1894
1896
  const executedToolCalls = [];
1895
1897
  const toolExecutions = [];
1898
+ const reasoningBlocks = [];
1896
1899
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
1897
1900
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
1898
1901
  const transportTools = toResponsesTools(toProviderFunctionTools(mcpToolset));
@@ -1922,12 +1925,15 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
1922
1925
  lastPayload = payload;
1923
1926
  aggregatedUsage = mergeUsage(aggregatedUsage, pickUsage(payload));
1924
1927
  finishReason = pickResponsesFinishReason(payload) ?? finishReason;
1928
+ pushReasoningBlock(reasoningBlocks, round, pickResponsesReasoning(payload));
1925
1929
  const providerToolCalls = pickResponsesToolCalls(payload);
1926
1930
  const functionCalls = providerToolCalls.filter((toolCall) => toolCall.type === "function" && typeof toolCall.id === "string" && typeof toolCall.name === "string");
1927
1931
  if (functionCalls.length === 0) {
1928
1932
  const text = pickResponsesText(payload) || pickAssistantText(payload);
1929
1933
  return {
1930
1934
  text,
1935
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
1936
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
1931
1937
  raw: payload,
1932
1938
  usage: aggregatedUsage,
1933
1939
  finishReason,
@@ -1955,6 +1961,8 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
1955
1961
  }
1956
1962
  return {
1957
1963
  text: pickResponsesText(lastPayload ?? {}) || pickAssistantText(lastPayload ?? {}),
1964
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
1965
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
1958
1966
  raw: lastPayload,
1959
1967
  usage: aggregatedUsage,
1960
1968
  finishReason,
@@ -1970,9 +1978,9 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
1970
1978
  let lastPayload;
1971
1979
  const executedToolCalls = [];
1972
1980
  const toolExecutions = [];
1981
+ const reasoningBlocks = [];
1973
1982
  callbacks.onStart?.();
1974
1983
  let lastRoundText = "";
1975
- let lastRoundReasoning = "";
1976
1984
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
1977
1985
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
1978
1986
  const transportTools = toProviderFunctionTools(mcpToolset);
@@ -2034,6 +2042,7 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2034
2042
  const chunk = {
2035
2043
  textDelta: delta,
2036
2044
  reasoningDelta: reasoningDelta || undefined,
2045
+ turnIndex: round,
2037
2046
  raw: json,
2038
2047
  usage: chunkUsage,
2039
2048
  finishReason: chunkFinishReason
@@ -2046,22 +2055,41 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2046
2055
  finishReason = roundFinishReason;
2047
2056
  }
2048
2057
  const calledTools = buildOpenAIStreamToolCalls(streamedToolCalls);
2058
+ pushReasoningBlock(reasoningBlocks, round, roundReasoning);
2059
+ request.onTurnTransition?.({
2060
+ turnIndex: round,
2061
+ kind: "reasoningComplete",
2062
+ reasoningText: roundReasoning
2063
+ });
2049
2064
  if (calledTools.length === 0) {
2050
2065
  const out2 = {
2051
2066
  text: roundText,
2052
- reasoning: roundReasoning.length > 0 ? roundReasoning : undefined,
2067
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2068
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2053
2069
  raw: lastPayload,
2054
2070
  usage: aggregatedUsage,
2055
2071
  finishReason,
2056
2072
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2057
2073
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2058
2074
  };
2075
+ request.onTurnTransition?.({ turnIndex: round, kind: "streamEnd" });
2059
2076
  callbacks.onComplete?.(out2);
2060
2077
  return out2;
2061
2078
  }
2062
2079
  if (round > maxToolRounds) {
2063
2080
  throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
2064
2081
  }
2082
+ request.onTurnTransition?.({
2083
+ turnIndex: round,
2084
+ kind: "toolCallsEmit",
2085
+ toolCalls: calledTools
2086
+ });
2087
+ callbacks.onChunk?.({
2088
+ textDelta: "",
2089
+ turnIndex: round,
2090
+ toolCalls: calledTools,
2091
+ finishReason: roundFinishReason
2092
+ });
2065
2093
  const outputs = await executeMCPToolCalls(calledTools, mcpToolset, {
2066
2094
  round,
2067
2095
  request,
@@ -2070,8 +2098,8 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2070
2098
  });
2071
2099
  executedToolCalls.push(...outputs.map((entry) => entry.call));
2072
2100
  toolExecutions.push(...outputs.map((entry) => entry.execution));
2101
+ request.onTurnTransition?.({ turnIndex: round, kind: "toolResultsReceived" });
2073
2102
  lastRoundText = roundText;
2074
- lastRoundReasoning = roundReasoning;
2075
2103
  const assistantMessage = buildOpenAIAssistantToolMessage(roundText, calledTools, {
2076
2104
  reasoning: roundReasoning,
2077
2105
  reasoningFieldName
@@ -2085,13 +2113,15 @@ async function streamWithChatCompletionsWithMCP(options, fetcher, path, request,
2085
2113
  }
2086
2114
  const out = {
2087
2115
  text: lastRoundText,
2088
- reasoning: lastRoundReasoning.length > 0 ? lastRoundReasoning : undefined,
2116
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2117
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2089
2118
  raw: lastPayload,
2090
2119
  usage: aggregatedUsage,
2091
2120
  finishReason,
2092
2121
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2093
2122
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2094
2123
  };
2124
+ request.onTurnTransition?.({ turnIndex: maxToolRounds + 1, kind: "streamEnd" });
2095
2125
  callbacks.onComplete?.(out);
2096
2126
  return out;
2097
2127
  }
@@ -2174,6 +2204,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2174
2204
  let lastPayload;
2175
2205
  const executedToolCalls = [];
2176
2206
  const toolExecutions = [];
2207
+ const reasoningBlocks = [];
2177
2208
  callbacks.onStart?.();
2178
2209
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
2179
2210
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
@@ -2202,6 +2233,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2202
2233
  throw new Error(`HTTP ${response.status}: ${message}`);
2203
2234
  }
2204
2235
  let roundText = "";
2236
+ let roundReasoning = "";
2205
2237
  let roundUsage;
2206
2238
  let roundFinishReason;
2207
2239
  let roundPayload;
@@ -2220,6 +2252,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2220
2252
  lastPayload = payload;
2221
2253
  }
2222
2254
  const delta = pickResponsesStreamTextDelta(json);
2255
+ const reasoningDelta = pickResponsesStreamReasoningDelta(json);
2223
2256
  const chunkUsage = pickResponsesStreamUsage(json);
2224
2257
  const chunkFinishReason = pickResponsesStreamFinishReason(json);
2225
2258
  collectResponsesStreamToolCalls(json, streamedToolCalls);
@@ -2231,9 +2264,14 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2231
2264
  roundText += delta;
2232
2265
  callbacks.onToken?.(delta);
2233
2266
  }
2234
- if (delta || chunkUsage || chunkFinishReason) {
2267
+ if (reasoningDelta) {
2268
+ roundReasoning += reasoningDelta;
2269
+ }
2270
+ if (delta || reasoningDelta || chunkUsage || chunkFinishReason) {
2235
2271
  const chunk = {
2236
2272
  textDelta: delta,
2273
+ reasoningDelta: reasoningDelta || undefined,
2274
+ turnIndex: round,
2237
2275
  raw: json,
2238
2276
  usage: chunkUsage,
2239
2277
  finishReason: chunkFinishReason
@@ -2249,25 +2287,48 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2249
2287
  finishReason = pickResponsesFinishReason(roundPayload) ?? finishReason;
2250
2288
  }
2251
2289
  const payloadToolCalls = roundPayload ? pickResponsesToolCalls(roundPayload) : [];
2290
+ if (roundPayload && roundReasoning.length === 0) {
2291
+ roundReasoning = pickResponsesReasoning(roundPayload);
2292
+ }
2252
2293
  const streamedCalls = buildResponsesStreamToolCalls(streamedToolCalls);
2253
2294
  const providerToolCalls = payloadToolCalls.length > 0 ? payloadToolCalls : streamedCalls;
2254
2295
  const functionCalls = providerToolCalls.filter((toolCall) => toolCall.type === "function" && typeof toolCall.id === "string" && typeof toolCall.name === "string");
2296
+ pushReasoningBlock(reasoningBlocks, round, roundReasoning);
2297
+ request.onTurnTransition?.({
2298
+ turnIndex: round,
2299
+ kind: "reasoningComplete",
2300
+ reasoningText: roundReasoning
2301
+ });
2255
2302
  if (functionCalls.length === 0) {
2256
2303
  const finalText = roundText.length > 0 ? roundText : roundPayload ? pickResponsesText(roundPayload) || pickAssistantText(roundPayload) : "";
2257
2304
  const out2 = {
2258
2305
  text: finalText,
2306
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2307
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2259
2308
  raw: roundPayload ?? lastPayload,
2260
2309
  usage: aggregatedUsage,
2261
2310
  finishReason,
2262
2311
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2263
2312
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2264
2313
  };
2314
+ request.onTurnTransition?.({ turnIndex: round, kind: "streamEnd" });
2265
2315
  callbacks.onComplete?.(out2);
2266
2316
  return out2;
2267
2317
  }
2268
2318
  if (round > maxToolRounds) {
2269
2319
  throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
2270
2320
  }
2321
+ request.onTurnTransition?.({
2322
+ turnIndex: round,
2323
+ kind: "toolCallsEmit",
2324
+ toolCalls: functionCalls
2325
+ });
2326
+ callbacks.onChunk?.({
2327
+ textDelta: "",
2328
+ turnIndex: round,
2329
+ toolCalls: functionCalls,
2330
+ finishReason: roundFinishReason
2331
+ });
2271
2332
  const outputs = await executeMCPToolCalls(functionCalls, mcpToolset, {
2272
2333
  round,
2273
2334
  request,
@@ -2276,6 +2337,7 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2276
2337
  });
2277
2338
  executedToolCalls.push(...outputs.map((entry) => entry.call));
2278
2339
  toolExecutions.push(...outputs.map((entry) => entry.execution));
2340
+ request.onTurnTransition?.({ turnIndex: round, kind: "toolResultsReceived" });
2279
2341
  input = outputs.map((entry) => ({
2280
2342
  type: "function_call_output",
2281
2343
  call_id: entry.call.id,
@@ -2285,12 +2347,15 @@ async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, ca
2285
2347
  }
2286
2348
  const out = {
2287
2349
  text: pickResponsesText(lastPayload ?? {}) || pickAssistantText(lastPayload ?? {}),
2350
+ reasoning: joinReasoningBlocks(reasoningBlocks) || undefined,
2351
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2288
2352
  raw: lastPayload,
2289
2353
  usage: aggregatedUsage,
2290
2354
  finishReason,
2291
2355
  toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
2292
2356
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
2293
2357
  };
2358
+ request.onTurnTransition?.({ turnIndex: maxToolRounds + 1, kind: "streamEnd" });
2294
2359
  callbacks.onComplete?.(out);
2295
2360
  return out;
2296
2361
  }
@@ -2575,6 +2640,20 @@ function pickResponsesStreamTextDelta(payload) {
2575
2640
  }
2576
2641
  return "";
2577
2642
  }
2643
+ function pickResponsesStreamReasoningDelta(payload) {
2644
+ const eventType = pickString(payload.type) ?? "";
2645
+ if (!eventType.includes("reasoning") && !eventType.includes("thinking")) {
2646
+ return "";
2647
+ }
2648
+ const direct = pickString(payload.delta);
2649
+ if (direct) {
2650
+ return direct;
2651
+ }
2652
+ if (isRecord2(payload.delta)) {
2653
+ return pickReasoningText(payload.delta) || pickString(payload.delta.text) || pickString(payload.delta.summary_text) || "";
2654
+ }
2655
+ return "";
2656
+ }
2578
2657
  function pickResponsesStreamUsage(payload) {
2579
2658
  const direct = pickUsage(payload);
2580
2659
  if (direct) {
@@ -2709,6 +2788,30 @@ function pickResponsesText(payload) {
2709
2788
  }).join("");
2710
2789
  }).join("");
2711
2790
  }
2791
+ function pickResponsesReasoning(payload) {
2792
+ const direct = pickReasoningText(payload);
2793
+ if (direct) {
2794
+ return direct;
2795
+ }
2796
+ const output = payload.output;
2797
+ if (!Array.isArray(output)) {
2798
+ return "";
2799
+ }
2800
+ return output.map((item) => {
2801
+ if (!isRecord2(item)) {
2802
+ return "";
2803
+ }
2804
+ const itemReasoning = pickReasoningText(item);
2805
+ if (itemReasoning) {
2806
+ return itemReasoning;
2807
+ }
2808
+ const itemType = pickString(item.type) ?? "";
2809
+ if ((itemType.includes("reasoning") || itemType.includes("thinking")) && Array.isArray(item.content)) {
2810
+ return item.content.map((part) => isRecord2(part) ? pickTextLike(part) : "").join("");
2811
+ }
2812
+ return "";
2813
+ }).join("");
2814
+ }
2712
2815
  function pickAssistantText(payload) {
2713
2816
  const message = pickAssistantMessage(payload);
2714
2817
  if (message) {
@@ -2729,6 +2832,18 @@ function pickAssistantText(payload) {
2729
2832
  function pickReasoningText(value) {
2730
2833
  return pickTextLike(value.reasoning) || pickTextLike(value.reasoning_content);
2731
2834
  }
2835
+ function pushReasoningBlock(blocks, turnIndex, text) {
2836
+ const clean = text?.replace(/<\/?think\s*>/gi, "").trim();
2837
+ if (!clean) {
2838
+ return;
2839
+ }
2840
+ blocks.push({ turnIndex, text: clean });
2841
+ }
2842
+ function joinReasoningBlocks(blocks) {
2843
+ return blocks.map((block) => block.text).filter(Boolean).join(`
2844
+
2845
+ `);
2846
+ }
2732
2847
  function pickTextFromOpenAIContent(value) {
2733
2848
  return pickTextLike(value);
2734
2849
  }
@@ -2909,6 +3024,7 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
2909
3024
  let lastPayload;
2910
3025
  const toolCalls = [];
2911
3026
  const toolExecutions = [];
3027
+ const reasoningBlocks = [];
2912
3028
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
2913
3029
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
2914
3030
  const tools = toAnthropicTools(toProviderFunctionTools(mcpToolset));
@@ -2938,9 +3054,12 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
2938
3054
  finishReason = pickFinishReason2(payload);
2939
3055
  const content = Array.isArray(payload.content) ? payload.content : [];
2940
3056
  const calledTools = pickAnthropicToolCalls(payload).filter((call) => call.type === "function");
3057
+ pushReasoningBlock2(reasoningBlocks, round, extractAnthropicReasoning(payload));
2941
3058
  if (calledTools.length === 0) {
2942
3059
  return {
2943
3060
  text: extractAnthropicText(payload),
3061
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3062
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2944
3063
  raw: payload,
2945
3064
  usage: aggregatedUsage,
2946
3065
  finishReason,
@@ -2976,6 +3095,8 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
2976
3095
  }
2977
3096
  return {
2978
3097
  text: extractAnthropicText(lastPayload ?? {}),
3098
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3099
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
2979
3100
  raw: lastPayload,
2980
3101
  usage: aggregatedUsage,
2981
3102
  finishReason,
@@ -2992,6 +3113,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
2992
3113
  let lastPayload;
2993
3114
  const toolCalls = [];
2994
3115
  const toolExecutions = [];
3116
+ const reasoningBlocks = [];
2995
3117
  callbacks.onStart?.();
2996
3118
  for (let round = 1;round <= maxToolRounds + 1; round += 1) {
2997
3119
  const mcpToolset = await resolveMCPToolset(request.mcpClients);
@@ -3017,6 +3139,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3017
3139
  throw new Error(`HTTP ${response.status}: ${message}`);
3018
3140
  }
3019
3141
  let roundText = "";
3142
+ let roundReasoning = "";
3020
3143
  let roundUsage;
3021
3144
  let roundFinishReason;
3022
3145
  const streamedToolCalls = new Map;
@@ -3030,6 +3153,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3030
3153
  }
3031
3154
  lastPayload = json;
3032
3155
  const delta = pickAnthropicDelta(json);
3156
+ const reasoningDelta = pickAnthropicReasoningDelta(json);
3033
3157
  const chunkUsage = pickUsage2(json);
3034
3158
  const chunkFinishReason = pickFinishReason2(json);
3035
3159
  collectAnthropicStreamToolCalls(json, streamedToolCalls);
@@ -3041,9 +3165,14 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3041
3165
  roundText += delta;
3042
3166
  callbacks.onToken?.(delta);
3043
3167
  }
3044
- if (delta || chunkUsage || chunkFinishReason) {
3168
+ if (reasoningDelta) {
3169
+ roundReasoning += reasoningDelta;
3170
+ }
3171
+ if (delta || reasoningDelta || chunkUsage || chunkFinishReason) {
3045
3172
  const chunk = {
3046
3173
  textDelta: delta,
3174
+ reasoningDelta: reasoningDelta || undefined,
3175
+ turnIndex: round,
3047
3176
  raw: json,
3048
3177
  usage: chunkUsage,
3049
3178
  finishReason: chunkFinishReason
@@ -3056,21 +3185,41 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3056
3185
  finishReason = roundFinishReason;
3057
3186
  }
3058
3187
  const calledTools = buildAnthropicStreamToolCalls(streamedToolCalls);
3188
+ pushReasoningBlock2(reasoningBlocks, round, roundReasoning);
3189
+ request.onTurnTransition?.({
3190
+ turnIndex: round,
3191
+ kind: "reasoningComplete",
3192
+ reasoningText: roundReasoning
3193
+ });
3059
3194
  if (calledTools.length === 0) {
3060
3195
  const out2 = {
3061
3196
  text: roundText,
3197
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3198
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
3062
3199
  raw: lastPayload,
3063
3200
  usage: aggregatedUsage,
3064
3201
  finishReason,
3065
3202
  toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
3066
3203
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
3067
3204
  };
3205
+ request.onTurnTransition?.({ turnIndex: round, kind: "streamEnd" });
3068
3206
  callbacks.onComplete?.(out2);
3069
3207
  return out2;
3070
3208
  }
3071
3209
  if (round > maxToolRounds) {
3072
3210
  throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
3073
3211
  }
3212
+ request.onTurnTransition?.({
3213
+ turnIndex: round,
3214
+ kind: "toolCallsEmit",
3215
+ toolCalls: calledTools
3216
+ });
3217
+ callbacks.onChunk?.({
3218
+ textDelta: "",
3219
+ turnIndex: round,
3220
+ toolCalls: calledTools,
3221
+ finishReason: roundFinishReason
3222
+ });
3074
3223
  const toolResultContent = [];
3075
3224
  const outputs = await executeMCPToolCalls(calledTools, mcpToolset, {
3076
3225
  round,
@@ -3080,6 +3229,7 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3080
3229
  });
3081
3230
  toolCalls.push(...outputs.map((entry) => entry.call));
3082
3231
  toolExecutions.push(...outputs.map((entry) => entry.execution));
3232
+ request.onTurnTransition?.({ turnIndex: round, kind: "toolResultsReceived" });
3083
3233
  for (const entry of outputs) {
3084
3234
  toolResultContent.push({
3085
3235
  type: "tool_result",
@@ -3096,12 +3246,15 @@ async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks)
3096
3246
  }
3097
3247
  const out = {
3098
3248
  text: "",
3249
+ reasoning: joinReasoningBlocks2(reasoningBlocks) || undefined,
3250
+ reasoningBlocks: reasoningBlocks.length > 0 ? reasoningBlocks : undefined,
3099
3251
  raw: lastPayload,
3100
3252
  usage: aggregatedUsage,
3101
3253
  finishReason,
3102
3254
  toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
3103
3255
  toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
3104
3256
  };
3257
+ request.onTurnTransition?.({ turnIndex: maxToolRounds + 1, kind: "streamEnd" });
3105
3258
  callbacks.onComplete?.(out);
3106
3259
  return out;
3107
3260
  }
@@ -3222,6 +3375,22 @@ function extractAnthropicText(payload) {
3222
3375
  return typeof text === "string" ? text : "";
3223
3376
  }).join("");
3224
3377
  }
3378
+ function extractAnthropicReasoning(payload) {
3379
+ const content = payload.content;
3380
+ if (!Array.isArray(content)) {
3381
+ return "";
3382
+ }
3383
+ return content.map((part) => {
3384
+ if (!isRecord2(part)) {
3385
+ return "";
3386
+ }
3387
+ const type = pickString(part.type) ?? "";
3388
+ if (type !== "thinking" && type !== "reasoning") {
3389
+ return "";
3390
+ }
3391
+ return pickString(part.thinking) ?? pickString(part.text) ?? pickString(part.reasoning) ?? "";
3392
+ }).join("");
3393
+ }
3225
3394
  function pickAnthropicToolCalls(payload) {
3226
3395
  const content = payload.content;
3227
3396
  if (!Array.isArray(content)) {
@@ -3252,6 +3421,35 @@ function pickAnthropicDelta(payload) {
3252
3421
  }
3253
3422
  return "";
3254
3423
  }
3424
+ function pickAnthropicReasoningDelta(payload) {
3425
+ const deltaObject = payload.delta;
3426
+ if (isRecord2(deltaObject)) {
3427
+ const type = pickString(deltaObject.type) ?? "";
3428
+ if (type === "thinking_delta" || type === "reasoning_delta") {
3429
+ return pickString(deltaObject.thinking) ?? pickString(deltaObject.text) ?? "";
3430
+ }
3431
+ }
3432
+ const contentBlock = payload.content_block;
3433
+ if (isRecord2(contentBlock)) {
3434
+ const type = pickString(contentBlock.type) ?? "";
3435
+ if (type === "thinking" || type === "reasoning") {
3436
+ return pickString(contentBlock.thinking) ?? pickString(contentBlock.text) ?? "";
3437
+ }
3438
+ }
3439
+ return "";
3440
+ }
3441
+ function pushReasoningBlock2(blocks, turnIndex, text) {
3442
+ const clean = text?.replace(/<\/?think\s*>/gi, "").trim();
3443
+ if (!clean) {
3444
+ return;
3445
+ }
3446
+ blocks.push({ turnIndex, text: clean });
3447
+ }
3448
+ function joinReasoningBlocks2(blocks) {
3449
+ return blocks.map((block) => block.text).filter(Boolean).join(`
3450
+
3451
+ `);
3452
+ }
3255
3453
  function collectAnthropicStreamToolCalls(payload, state) {
3256
3454
  const eventType = pickString(payload.type);
3257
3455
  if (!eventType) {
@@ -3725,6 +3923,7 @@ function normalizeStreamConfig(option) {
3725
3923
  return {
3726
3924
  enabled: option.enabled ?? true,
3727
3925
  onData: option.onData,
3926
+ onTurnTransition: option.onTurnTransition,
3728
3927
  to: option.to
3729
3928
  };
3730
3929
  }
@@ -3792,6 +3991,7 @@ async function callModel(adapter, options) {
3792
3991
  transformToolCallParams: options.request?.transformToolCallParams,
3793
3992
  unknownToolError: options.request?.unknownToolError,
3794
3993
  toolDebug: options.request?.toolDebug,
3994
+ onTurnTransition: options.stream.onTurnTransition,
3795
3995
  body: options.request?.body,
3796
3996
  signal: requestSignal
3797
3997
  };
@@ -3819,13 +4019,21 @@ async function callModel(adapter, options) {
3819
4019
  let latestFinishReason;
3820
4020
  let streamedProviderText = "";
3821
4021
  let streamedDedicatedReasoning = "";
4022
+ let currentTurnIndex;
4023
+ let currentToolCalls;
4024
+ let streamedReasoningBlocks;
3822
4025
  let lastSnapshotFingerprint;
3823
4026
  let previousSnapshotText = "";
3824
4027
  let previousSnapshotReasoning = "";
3825
4028
  const emitStreamingData = (done, usage2, finishReason2) => {
3826
- const normalized2 = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning);
4029
+ const normalized2 = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning, streamedReasoningBlocks);
3827
4030
  const snapshot = options.buildSnapshot(normalized2);
3828
- const fingerprint = toStreamDataFingerprint(snapshot);
4031
+ const fingerprint = toStreamDataFingerprint({
4032
+ snapshot,
4033
+ done,
4034
+ turnIndex: currentTurnIndex,
4035
+ toolCalls: currentToolCalls
4036
+ });
3829
4037
  if (!done && fingerprint === lastSnapshotFingerprint) {
3830
4038
  return;
3831
4039
  }
@@ -3841,7 +4049,9 @@ async function callModel(adapter, options) {
3841
4049
  snapshot,
3842
4050
  done,
3843
4051
  usage: usage2,
3844
- finishReason: finishReason2
4052
+ finishReason: finishReason2,
4053
+ turnIndex: currentTurnIndex,
4054
+ toolCalls: currentToolCalls
3845
4055
  });
3846
4056
  if (options.stream.to === "stdout" && delta.text) {
3847
4057
  process.stdout.write(delta.text);
@@ -3876,8 +4086,21 @@ async function callModel(adapter, options) {
3876
4086
  streamedDedicatedReasoning += delta;
3877
4087
  emitStreamingData(false);
3878
4088
  };
3879
- const response2 = await adapter.stream(requestPayload, {
4089
+ const streamRequestPayload = {
4090
+ ...requestPayload,
4091
+ onTurnTransition: (transition) => {
4092
+ if (transition.kind === "reasoningComplete") {
4093
+ streamedReasoningBlocks = appendReasoningBlock(streamedReasoningBlocks, transition);
4094
+ }
4095
+ options.stream.onTurnTransition?.(transition);
4096
+ }
4097
+ };
4098
+ const response2 = await adapter.stream(streamRequestPayload, {
3880
4099
  onChunk: (chunk) => {
4100
+ if (chunk.turnIndex !== undefined) {
4101
+ currentTurnIndex = chunk.turnIndex;
4102
+ }
4103
+ currentToolCalls = chunk.toolCalls;
3881
4104
  if (chunk.textDelta) {
3882
4105
  handleTextDelta(chunk.textDelta);
3883
4106
  }
@@ -3890,11 +4113,15 @@ async function callModel(adapter, options) {
3890
4113
  if (chunk.finishReason) {
3891
4114
  latestFinishReason = chunk.finishReason;
3892
4115
  }
4116
+ if (!chunk.textDelta && !chunk.reasoningDelta && (chunk.turnIndex !== undefined || chunk.toolCalls)) {
4117
+ emitStreamingData(false, chunk.usage, chunk.finishReason);
4118
+ }
3893
4119
  }
3894
4120
  });
3895
4121
  streamedProviderText = typeof response2.text === "string" ? response2.text : streamedProviderText;
3896
4122
  streamedDedicatedReasoning = typeof response2.reasoning === "string" ? response2.reasoning : streamedDedicatedReasoning;
3897
- const finalNormalized = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning);
4123
+ streamedReasoningBlocks = response2.reasoningBlocks ?? streamedReasoningBlocks;
4124
+ const finalNormalized = normalizeModelOutput(streamedProviderText, streamedDedicatedReasoning, streamedReasoningBlocks);
3898
4125
  const usage = preferLatestUsage(latestUsage, response2.usage);
3899
4126
  const finishReason = response2.finishReason ?? latestFinishReason;
3900
4127
  emitStreamingData(true, usage, finishReason);
@@ -3926,11 +4153,12 @@ async function callModel(adapter, options) {
3926
4153
  parseSource: finalNormalized.parseSource,
3927
4154
  via: "stream",
3928
4155
  usage,
3929
- finishReason
4156
+ finishReason,
4157
+ reasoningBlocks: finalNormalized.reasoningBlocks
3930
4158
  };
3931
4159
  }
3932
4160
  const response = await adapter.complete(requestPayload);
3933
- const normalized = normalizeModelOutput(response.text, response.reasoning);
4161
+ const normalized = normalizeModelOutput(response.text, response.reasoning, response.reasoningBlocks);
3934
4162
  options.observe?.(options.buildEvent({
3935
4163
  stage: "llm.response",
3936
4164
  message: "Completion response received.",
@@ -3959,10 +4187,11 @@ async function callModel(adapter, options) {
3959
4187
  parseSource: normalized.parseSource,
3960
4188
  via: "complete",
3961
4189
  usage: response.usage,
3962
- finishReason: response.finishReason
4190
+ finishReason: response.finishReason,
4191
+ reasoningBlocks: normalized.reasoningBlocks
3963
4192
  };
3964
4193
  }
3965
- function normalizeModelOutput(text, dedicatedReasoning) {
4194
+ function normalizeModelOutput(text, dedicatedReasoning, reasoningBlocks) {
3966
4195
  const sanitized = sanitizeThink(text);
3967
4196
  const visibleText = stripThinkBlocks(text, sanitized.thinkBlocks);
3968
4197
  const reasoning = joinReasoningSegments([
@@ -3972,10 +4201,29 @@ function normalizeModelOutput(text, dedicatedReasoning) {
3972
4201
  return {
3973
4202
  text: visibleText,
3974
4203
  reasoning,
4204
+ reasoningBlocks: normalizeReasoningBlocks(reasoningBlocks),
3975
4205
  thinkBlocks: sanitized.thinkBlocks,
3976
4206
  parseSource: composeParseSource(visibleText, reasoning)
3977
4207
  };
3978
4208
  }
4209
/**
 * Normalizes a raw reasoning-block list: strips think tags (via the
 * module-level RE_THINK_TAGS regex), trims whitespace, and keeps only blocks
 * with a finite numeric `turnIndex` and non-empty cleaned text.
 *
 * @param {unknown} blocks - Candidate block list.
 * @returns {Array<{turnIndex: number, text: string}>|undefined} The surviving
 *   blocks, or undefined when input is not an array or nothing survives.
 */
function normalizeReasoningBlocks(blocks) {
  if (!Array.isArray(blocks)) {
    return undefined;
  }
  const kept = [];
  for (const block of blocks) {
    const text = block.text.replace(RE_THINK_TAGS, "").trim();
    if (Number.isFinite(block.turnIndex) && text.length > 0) {
      kept.push({ turnIndex: block.turnIndex, text });
    }
  }
  // Callers treat "no blocks" and "undefined" the same; prefer undefined.
  return kept.length > 0 ? kept : undefined;
}
4219
/**
 * Returns a new reasoning-block list with the transition's reasoning text
 * appended. Does not mutate `blocks`.
 *
 * The text is cleaned of think tags (RE_THINK_TAGS) and trimmed; when nothing
 * remains, the original list is returned unchanged. The extended list is
 * re-run through normalizeReasoningBlocks, so invalid entries are dropped and
 * an all-empty result collapses to undefined.
 *
 * @param {Array<{turnIndex: number, text: string}>|undefined} blocks - Current list, may be undefined.
 * @param {{turnIndex: number, reasoningText?: string}} transition - Turn-transition event.
 * @returns {Array<{turnIndex: number, text: string}>|undefined} Updated list.
 */
function appendReasoningBlock(blocks, transition) {
  const cleaned = transition.reasoningText?.replace(RE_THINK_TAGS, "").trim();
  if (!cleaned) {
    return blocks;
  }
  const existing = blocks ?? [];
  return normalizeReasoningBlocks([...existing, { turnIndex: transition.turnIndex, text: cleaned }]);
}
3979
4227
  function composeParseSource(text, reasoning) {
3980
4228
  if (typeof reasoning !== "string" || reasoning.length === 0) {
3981
4229
  return text;
@@ -4135,7 +4383,8 @@ async function generate(adapter, promptOrOptions, callOptions) {
4135
4383
  }),
4136
4384
  buildSnapshot: (model) => ({
4137
4385
  text: model.text,
4138
- reasoning: model.reasoning
4386
+ reasoning: model.reasoning,
4387
+ ...model.reasoningBlocks ? { reasoningBlocks: model.reasoningBlocks } : {}
4139
4388
  }),
4140
4389
  debug: debugConfig,
4141
4390
  debugLabel: "generate",
@@ -4150,7 +4399,8 @@ async function generate(adapter, promptOrOptions, callOptions) {
4150
4399
  text: response.text,
4151
4400
  reasoning: response.reasoning,
4152
4401
  usage: response.usage,
4153
- finishReason: response.finishReason
4402
+ finishReason: response.finishReason,
4403
+ ...response.reasoningBlocks ? { reasoningBlocks: response.reasoningBlocks } : {}
4154
4404
  };
4155
4405
  const attempts = [attempt];
4156
4406
  normalized.observe?.({
@@ -4167,7 +4417,8 @@ async function generate(adapter, promptOrOptions, callOptions) {
4167
4417
  reasoning: attempt.reasoning,
4168
4418
  attempts,
4169
4419
  usage: aggregateUsage(attempts),
4170
- finishReason: attempt.finishReason
4420
+ finishReason: attempt.finishReason,
4421
+ ...attempt.reasoningBlocks ? { reasoningBlocks: attempt.reasoningBlocks } : {}
4171
4422
  };
4172
4423
  }
4173
4424
  function normalizeGenerateInput(promptOrOptions, callOptions) {
@@ -4987,6 +5238,7 @@ async function executeAttempt(adapter, input) {
4987
5238
  success: parsed.success,
4988
5239
  usage: response.usage,
4989
5240
  finishReason: response.finishReason,
5241
+ ...response.reasoningBlocks ? { reasoningBlocks: response.reasoningBlocks } : {},
4990
5242
  parsed
4991
5243
  };
4992
5244
  return {
@@ -5007,6 +5259,7 @@ async function callModel2(adapter, options) {
5007
5259
  buildSnapshot: (normalized) => ({
5008
5260
  text: normalized.text,
5009
5261
  reasoning: normalized.reasoning,
5262
+ ...normalized.reasoningBlocks ? { reasoningBlocks: normalized.reasoningBlocks } : {},
5010
5263
  data: parseStreamingStructuredData(normalized.parseSource) ?? null
5011
5264
  }),
5012
5265
  debugLabel: "structured"
@@ -5107,7 +5360,8 @@ function buildSuccessResult(data, attempts) {
5107
5360
  json: final?.json ?? null,
5108
5361
  attempts,
5109
5362
  usage: aggregateUsage(attempts),
5110
- finishReason: final?.finishReason
5363
+ finishReason: final?.finishReason,
5364
+ ...final?.reasoningBlocks ? { reasoningBlocks: final.reasoningBlocks } : {}
5111
5365
  };
5112
5366
  }
5113
5367
  function toStructuredError(attempt) {
@@ -5574,11 +5828,11 @@ function inferSchemaExample(schema) {
5574
5828
  }
5575
5829
  function getObjectShape(schema) {
5576
5830
  const unwrapped = unwrap2(schema).schema;
5577
- const typeName = unwrapped._def?.type;
5831
+ const typeName = unwrapped.def?.type;
5578
5832
  if (typeName !== "object") {
5579
5833
  return null;
5580
5834
  }
5581
- const rawShape = unwrapped._def?.shape;
5835
+ const rawShape = unwrapped.def?.shape;
5582
5836
  if (typeof rawShape === "function") {
5583
5837
  return rawShape();
5584
5838
  }
@@ -5586,11 +5840,11 @@ function getObjectShape(schema) {
5586
5840
  }
5587
5841
  function readDefaultValue(schema) {
5588
5842
  let current = schema;
5589
- while (current?._def?.type) {
5590
- const typeName = current._def.type;
5843
+ while (current?.def?.type) {
5844
+ const typeName = current.def.type;
5591
5845
  if (typeName === "default") {
5592
5846
  try {
5593
- const raw = current._def.defaultValue;
5847
+ const raw = current.def.defaultValue;
5594
5848
  if (typeof raw === "function") {
5595
5849
  return raw();
5596
5850
  }
@@ -5600,11 +5854,11 @@ function readDefaultValue(schema) {
5600
5854
  }
5601
5855
  }
5602
5856
  if (typeName === "optional" || typeName === "nullable" || typeName === "catch" || typeName === "readonly") {
5603
- current = current._def.innerType ?? current;
5857
+ current = current.def.innerType ?? current;
5604
5858
  continue;
5605
5859
  }
5606
5860
  if (typeName === "pipe") {
5607
- current = current._def.in ?? current;
5861
+ current = current.def.in ?? current;
5608
5862
  continue;
5609
5863
  }
5610
5864
  return;
@@ -5613,22 +5867,22 @@ function readDefaultValue(schema) {
5613
5867
  }
5614
5868
  function readSchemaDescription2(schema) {
5615
5869
  let current = schema;
5616
- while (current?._def?.type) {
5870
+ while (current?.def?.type) {
5617
5871
  const desc = current.description;
5618
5872
  if (typeof desc === "string" && desc.trim().length > 0) {
5619
5873
  return desc.trim();
5620
5874
  }
5621
- const typeName = current._def.type;
5875
+ const typeName = current.def.type;
5622
5876
  if (typeName === "optional" || typeName === "default" || typeName === "nullable") {
5623
- current = current._def.innerType ?? current;
5877
+ current = current.def.innerType ?? current;
5624
5878
  continue;
5625
5879
  }
5626
5880
  if (typeName === "catch" || typeName === "readonly") {
5627
- current = current._def.innerType ?? current;
5881
+ current = current.def.innerType ?? current;
5628
5882
  continue;
5629
5883
  }
5630
5884
  if (typeName === "pipe") {
5631
- current = current._def.in ?? current;
5885
+ current = current.def.in ?? current;
5632
5886
  continue;
5633
5887
  }
5634
5888
  break;
@@ -5638,19 +5892,19 @@ function readSchemaDescription2(schema) {
5638
5892
  function unwrap2(schema) {
5639
5893
  let current = schema;
5640
5894
  let optional = false;
5641
- while (current?._def?.type) {
5642
- const typeName = current._def.type;
5895
+ while (current?.def?.type) {
5896
+ const typeName = current.def.type;
5643
5897
  if (typeName === "optional" || typeName === "default") {
5644
5898
  optional = true;
5645
- current = current._def.innerType ?? current;
5899
+ current = current.def.innerType ?? current;
5646
5900
  continue;
5647
5901
  }
5648
5902
  if (typeName === "nullable" || typeName === "catch" || typeName === "readonly") {
5649
- current = current._def.innerType ?? current;
5903
+ current = current.def.innerType ?? current;
5650
5904
  continue;
5651
5905
  }
5652
5906
  if (typeName === "pipe") {
5653
- current = current._def.in ?? current;
5907
+ current = current.def.in ?? current;
5654
5908
  continue;
5655
5909
  }
5656
5910
  break;